Line data Source code
1 : // $Id$
2 : //**************************************************************************
3 : //* This file is property of and copyright by the *
4 : //* ALICE Experiment at CERN, All rights reserved. *
5 : //* *
6 : //* Primary Authors: Matthias Richter <Matthias.Richter@ift.uib.no> *
7 : //* *
8 : //* Permission to use, copy, modify and distribute this software and its *
9 : //* documentation strictly for non-commercial purposes is hereby granted *
10 : //* without fee, provided that the above copyright notice appears in all *
11 : //* copies and that both the copyright notice and this permission notice *
12 : //* appear in the supporting documentation. The authors make no claims *
13 : //* about the suitability of this software for any purpose. It is *
14 : //* provided "as is" without express or implied warranty. *
15 : //**************************************************************************
16 :
17 : /// @file AliHLTTPCDataPublisherComponent.cxx
18 : /// @author Matthias Richter
19 : /// @date 2011-08-08
20 : /// @brief
21 : ///
22 :
23 : #include "AliHLTTPCDataPublisherComponent.h"
24 : #include "AliHLTTPCDefinitions.h"
25 : #include "AliHLTTPCGeometry.h"
26 : #include "AliHLTTPCClusterMCData.h"
27 : #include "AliHLTTPCDataCompressionDecoder.h"
28 : #include "AliHLTTPCRawClustersDescriptor.h"
29 : #include "AliHLTPluginBase.h"
30 : #include "AliHLTSystem.h"
31 : #include "AliHLTOUT.h"
32 : #include "AliHLTDAQ.h"
33 : #include "AliHLTTemplates.h"
34 : #include "AliLog.h"
35 : #include <vector>
36 : #include <memory>
37 : #include <algorithm>
38 :
39 6 : ClassImp(AliHLTTPCDataPublisherComponent)
40 :
AliHLTTPCDataPublisherComponent::AliHLTTPCDataPublisherComponent()
  : AliHLTRawReaderPublisherComponent()
  , fMode(kPublisherModeDefault)
  , fArraySelected(NULL)
  , fClusters(NULL)
  , fpDecoder(NULL)
{
  /// constructor
  /// All members are initialized in the list above; the heavy allocations
  /// (cluster container, decoder) are deferred to DoInit and first use.
}
50 :
51 0 : AliHLTTPCDataPublisherComponent::~AliHLTTPCDataPublisherComponent()
52 18 : {
53 : /// destructor
54 3 : if (fpDecoder) delete fpDecoder;
55 3 : fpDecoder=NULL;
56 9 : }
57 :
58 :
59 : const char* AliHLTTPCDataPublisherComponent::GetComponentID()
60 : {
61 : /// inherited from AliHLTComponent: id of the component
62 402 : return "TPCDataPublisher";
63 : }
64 :
65 : AliHLTComponentDataType AliHLTTPCDataPublisherComponent::GetOutputDataType()
66 : {
67 : //
68 : // overwrite AliHLTRawDataPublisherComponent::GetOutputDataType(),
69 : // since the component can publishes not only raw data type, but also decompressed clusters
70 : //
71 0 : return kAliHLTMultipleDataType;
72 : }
73 :
74 : int AliHLTTPCDataPublisherComponent::GetOutputDataTypes(AliHLTComponentDataTypeList& tgtList)
75 : {
76 : // see header file for class documentation
77 0 : tgtList.clear();
78 0 : tgtList.push_back( kAliHLTDataTypeDDLRaw | kAliHLTDataOriginTPC );
79 0 : tgtList.push_back( AliHLTTPCDefinitions::RawClustersDataType() | kAliHLTDataOriginTPC );
80 0 : tgtList.push_back( AliHLTTPCDefinitions::RawClustersDescriptorDataType() | kAliHLTDataOriginTPC );
81 0 : return tgtList.size();
82 : }
83 :
84 :
85 : AliHLTComponent* AliHLTTPCDataPublisherComponent::Spawn()
86 : {
87 : /// inherited from AliHLTComponent: spawn function.
88 0 : return new AliHLTTPCDataPublisherComponent;
89 0 : }
90 :
int AliHLTTPCDataPublisherComponent::GetEvent(const AliHLTComponentEventData& evtData,
                                              AliHLTComponentTriggerData& trigData,
                                              AliHLTUInt8_t* outputPtr,
                                              AliHLTUInt32_t& outputSize,
                                              AliHLTComponentBlockDataList& outputBlocks)
{
  /// inherited from AliHLTProcessor: data processing
  /// Optionally decodes compressed clusters from HLTOUT into the output
  /// buffer (modes kPublishClustersAll/kRegisterClusterBlocks) and then lets
  /// the base class publish the selected raw DDL blocks behind them.
  /// On insufficient buffer space the total size requirement is accumulated
  /// in fMaxSize and -ENOSPC is returned so the framework retries with a
  /// larger buffer.
  if (!IsDataEvent()) return 0;

  AliHLTUInt32_t capacity=outputSize;
  outputSize=0;
  int iResult=0;

  // running end-of-data offset within the output buffer
  AliHLTUInt32_t offset=0;

  if( fClusters && ( CheckMode(kPublishClustersAll) || CheckMode(kRegisterClusterBlocks) ) ){

    fClusters->Clear();

    if (CheckMode(kPublishClustersAll)) {
      // set the target buffer only if the clusters should be published
      fClusters->SetTargetBuffer(outputPtr, capacity);
    } else if (CheckMode(kRegisterClusterBlocks)) {
      // data blocks are registered in the container, track model cluster blocks
      // are unpacked but not stored in order to find the included partitions
    }

    iResult = ReadClusterFromHLTInput(fClusters);

    if( CheckMode(kPublishClustersAll) ) { // write out cluster blocks

      // container state carries -ENOSPC if the target buffer overflowed
      if( iResult>=0 ) iResult = fClusters->GetState();

      if( iResult >= 0 ){
        AliHLTComponentBlockDataList clusterBlocks;
        fClusters->CopyBlockDescriptors(clusterBlocks);

        if( clusterBlocks.size() > 0 ){
          for (AliHLTComponentBlockDataList::iterator bd=clusterBlocks.begin(); bd!=clusterBlocks.end(); bd++) {
            // set proper data type for cluster blocks
            bd->fDataType = (AliHLTTPCDefinitions::RawClustersDataType() | kAliHLTDataOriginTPC );
            // find end of written cluster data
            if (offset < bd->fOffset + bd->fSize) offset = bd->fOffset + bd->fSize;
          }

          // add "merged clusters" descriptor. Even when compressed clusters
          // were not merged originally, they were automatically merged during
          // decoding
          AliHLTTPCRawClustersDescriptor desc;
          desc.SetMergedClustersFlag(1);

          AliHLTComponent_BlockData bd;
          FillBlockData(bd);
          bd.fOffset = offset;
          bd.fSize = sizeof(AliHLTTPCRawClustersDescriptor);
          bd.fDataType = AliHLTTPCDefinitions::RawClustersDescriptorDataType() | kAliHLTDataOriginTPC ;

          // write the descriptor only if it still fits; the offset is advanced
          // unconditionally so the size estimate stays correct on overflow
          if( offset + bd.fSize <= capacity ){
            *(AliHLTTPCRawClustersDescriptor*)(outputPtr+offset ) = desc;
            clusterBlocks.push_back(bd);
            outputBlocks.insert( outputBlocks.begin(), clusterBlocks.begin(), clusterBlocks.end() );
          }
          offset+= bd.fSize;
        }
      } else if (iResult==-ENOSPC) {
        // buffer too small: estimate the space needed for all cluster blocks
        offset = fClusters->GetBlockCount()*sizeof(AliHLTTPCRawClusterData)+
          fClusters->GetClusterCount()*sizeof(AliHLTTPCRawCluster)
          + sizeof(AliHLTTPCRawClustersDescriptor);
        iResult=0; // keep going to also accumulate the size for raw data blocks
      }
    }
    if (iResult==-ENODATA) {
      // return indicates absence of compressed clusters in HLTInput
      // but is not treated as an error further downstream
      iResult=0;
    }
  }

  if (iResult==-ENOSPC) {
    iResult=0; // keep going to also accumulate the size for raw data blocks
  }

  // Write out raw data. Virtual IsSelected() method decides which raw data
  // blocks are required (if any).
  AliHLTUInt32_t rawSize = 0;

  if (offset <= capacity) {
    // hand the remainder of the buffer to the base class
    rawSize = capacity - offset;
    outputPtr += offset;
  }

  if (iResult>=0) {
    unsigned firstBlock=outputBlocks.size();
    iResult = AliHLTRawReaderPublisherComponent::GetEvent(evtData, trigData, outputPtr, rawSize, outputBlocks);

    if (iResult==-ENOSPC) {
      // not enough space in the buffer, fMaxSize has been updated by base class
      offset += fMaxSize;
      iResult = 0;
    } else if (iResult>=0) {
      if (outputBlocks.size()>firstBlock && CheckMode(kPublishRawFiltered)) {
        AliInfo(Form("publishing %lu DDL(s) for emulation of compressed TPC clusters", outputBlocks.size()-firstBlock));
      }
      // correct for the shifted buffer which was provided to the
      // GetEvent method; only blocks added by the base class are shifted
      for (AliHLTComponentBlockDataList::iterator bd=outputBlocks.begin();
           bd!=outputBlocks.end(); bd++) {
        if (firstBlock>0) {firstBlock--; continue;}
        bd->fOffset+=offset;
      }
      offset+=rawSize;
    }
  }

  if (iResult>=0 && capacity<offset ) {
    // update the size requirement for the retry with a larger buffer
    if( fMaxSize<(int)offset ) fMaxSize=offset;
    iResult=-ENOSPC;
  }

  if (iResult>=0) {
    outputSize=offset;
  } else {
    // on error nothing is published
    outputBlocks.clear();
    outputSize = 0;
  }

  return iResult;
}
219 :
220 : int AliHLTTPCDataPublisherComponent::ReadClusterFromHLTInput(AliHLTTPCDataPublisherComponent::AliRawClusterContainer* pContainer)
221 : {
222 : // check the HLTInput for availability of compressed data blocks
223 : int iResult=0;
224 0 : AliHLTSystem* pSystem=AliHLTPluginBase::GetInstance();
225 0 : if (!pSystem) {
226 : // global system not initialized
227 0 : return -ENODEV;
228 : }
229 0 : AliHLTOUT* pHLTInput=pSystem->RequestHLTInput();
230 0 : if (!pHLTInput) {
231 : // not HLTInput, hence not clusters
232 0 : return 0;
233 : }
234 :
235 0 : if (!fpDecoder) {
236 0 : fpDecoder=new AliHLTTPCDataCompressionDecoder;
237 0 : }
238 :
239 0 : if (!fpDecoder) {
240 0 : AliError("failed to create decoder instance");
241 0 : return -ENODEV;
242 : }
243 :
244 : AliHLTTPCDataCompressionDecoder& decoder=*fpDecoder;
245 0 : decoder.Clear();
246 0 : decoder.SetVerbosity(GetVerbosity());
247 :
248 : bool bHavePartitionRawData=false;
249 : bool bHavePartitionCompressedData=false;
250 :
251 : bool bNextBlock=false;
252 : // add cluster id and mc information data blocks
253 0 : for (bNextBlock=(pHLTInput->SelectFirstDataBlock()>=0);
254 0 : bNextBlock; bNextBlock=(pHLTInput->SelectNextDataBlock()>=0)) {
255 0 : AliHLTComponentBlockData desc;
256 0 : if ((iResult=pHLTInput->GetDataBuffer(desc))<0) {
257 0 : continue;
258 : }
259 0 : if (desc.fDataType==AliHLTTPCDefinitions::DataCompressionDescriptorDataType()) {
260 : // compression header
261 0 : if ((iResult=decoder.AddCompressionDescriptor(&desc))<0) {
262 0 : return iResult;
263 : }
264 : bHavePartitionCompressedData = true;
265 0 : }
266 0 : if (desc.fDataType==AliHLTTPCDefinitions::RawClustersDescriptorDataType()) {
267 : // raw clusters header
268 0 : if ((iResult=decoder.AddRawClustersDescriptor(&desc))<0) {
269 0 : return iResult;
270 : }
271 : bHavePartitionRawData = true;
272 0 : }
273 0 : if (desc.fDataType==AliHLTTPCDefinitions::AliHLTDataTypeClusterMCInfo()) {
274 : // add mc information
275 0 : if ((iResult=decoder.AddClusterMCData(&desc))<0) {
276 0 : return iResult;
277 : }
278 : }
279 0 : if (desc.fDataType==AliHLTTPCDefinitions::RemainingClusterIdsDataType() ||
280 0 : desc.fDataType==AliHLTTPCDefinitions::ClusterIdTracksDataType()) {
281 : // add cluster ids
282 0 : if ((iResult=decoder.AddClusterIds(&desc))<0) {
283 0 : return iResult;
284 : }
285 : }
286 0 : if (desc.fDataType==AliHLTTPCDefinitions::ClustersFlagsDataType()) {
287 : // add cluster flags information
288 0 : if ((iResult=decoder.AddClusterFlags(&desc))<0) {
289 0 : return iResult;
290 : }
291 : }
292 0 : }
293 :
294 0 : vector<bool> bHavePartitionData(216, false);
295 :
296 : // read data
297 : iResult=-ENODATA;
298 : int nExtractedClusters=0;
299 0 : for (bNextBlock=(pHLTInput->SelectFirstDataBlock()>=0);
300 0 : bNextBlock; bNextBlock=(pHLTInput->SelectNextDataBlock()>=0)) {
301 0 : decoder.SetPadShift(0.0);
302 0 : AliHLTComponentBlockData desc;
303 0 : if ((iResult=pHLTInput->GetDataBuffer(desc))<0) {
304 0 : continue;
305 : }
306 0 : if (desc.fDataType==AliHLTTPCDefinitions::RawClustersDataType()) {
307 : // This is a special handling of data blocks produced with v5-01-Release
308 : // The pad shift by 0.5 was not included in the data but was applied in the
309 : // unpacking in this class. Changed in r51306, the next tag containing this
310 : // change in the online system is v5-01-Rev-07. There are only very few runs
311 : // of Sep 2011 with recorded clusters not containing the 0.5 shift
312 : // There was also a chenge in the data type of the compressed partition
313 : // cluster blocks which helps to identify the blocks which need the pad shift
314 : // here
315 0 : if (desc.fSize<sizeof(AliHLTTPCRawClusterData)) continue;
316 0 : const AliHLTTPCRawClusterData* clusterData = reinterpret_cast<const AliHLTTPCRawClusterData*>(desc.fPtr);
317 0 : if (!clusterData) continue;
318 0 : if (clusterData->fVersion==1) {
319 : // compressed clusters without the pad shift
320 : // no raw clusters (version==0) have ever been recorded
321 0 : decoder.SetPadShift(0.5);
322 0 : }
323 0 : AliHLTUInt8_t slice = AliHLTTPCDefinitions::GetMinSliceNr(desc.fSpecification);
324 0 : AliHLTUInt8_t partition = AliHLTTPCDefinitions::GetMinPatchNr(desc.fSpecification);
325 0 : if (slice!=AliHLTTPCDefinitions::GetMaxSliceNr(desc.fSpecification) ||
326 0 : partition!=AliHLTTPCDefinitions::GetMaxPatchNr(desc.fSpecification)) {
327 0 : AliFatal(Form("inconsistent cluster data: can not handle blocks containing multiple partitions, "
328 : "block specification 0x%08x", desc.fSpecification));
329 : }
330 0 : iResult=decoder.ReadClustersPartition(pContainer->BeginRemainingClusterBlock(0, desc.fSpecification),
331 0 : reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr),
332 0 : desc.fSize,
333 0 : desc.fSpecification);
334 0 : if (iResult>=0) nExtractedClusters+=iResult;
335 : else {
336 0 : AliFatal(Form("processing of cluster block 0x%08x failed with error code %d", desc.fSpecification, iResult));
337 : }
338 0 : unsigned index=slice*AliHLTTPCGeometry::GetNumberOfPatches()+partition;
339 0 : if (index>=bHavePartitionData.size()) bHavePartitionData.resize(index, false);
340 0 : if (bHavePartitionData[index]) {
341 0 : AliFatal(Form("inconsistent cluster data: multiple data blocks of identical specification indicate a failure "
342 : "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
343 : "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
344 : "block specification 0x%08x", desc.fSpecification));
345 : }
346 0 : bHavePartitionData[index]=true;
347 0 : if (bHavePartitionCompressedData) {
348 0 : AliFatal(Form("inconsistent cluster data: both compressed and raw cluster blocks present in HLTOUT, indicates a failure "
349 : "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
350 : "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
351 : "block specification 0x%08x", desc.fSpecification));
352 : }
353 : bHavePartitionRawData=true;
354 : continue;
355 0 : } else if (desc.fDataType==AliHLTTPCDefinitions::RemainingClustersCompressedDataType()) {
356 0 : AliHLTUInt8_t slice = AliHLTTPCDefinitions::GetMinSliceNr(desc.fSpecification);
357 0 : AliHLTUInt8_t partition = AliHLTTPCDefinitions::GetMinPatchNr(desc.fSpecification);
358 0 : if (slice!=AliHLTTPCDefinitions::GetMaxSliceNr(desc.fSpecification) ||
359 0 : partition!=AliHLTTPCDefinitions::GetMaxPatchNr(desc.fSpecification)) {
360 0 : AliFatal(Form("inconsistent cluster data: can not handle blocks containing multiple partitions, "
361 : "block specification 0x%08x", desc.fSpecification));
362 : }
363 0 : iResult=decoder.ReadClustersPartition(pContainer->BeginRemainingClusterBlock(0, desc.fSpecification),
364 0 : reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr),
365 0 : desc.fSize,
366 0 : desc.fSpecification);
367 0 : if (iResult>0) nExtractedClusters+=iResult;
368 0 : unsigned index=slice*AliHLTTPCGeometry::GetNumberOfPatches()+partition;
369 0 : if (index>=bHavePartitionData.size()) bHavePartitionData.resize(index, false);
370 0 : if (bHavePartitionData[index]) {
371 0 : AliFatal(Form("inconsistent cluster data: multiple data blocks of identical specification indicate a failure "
372 : "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
373 : "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
374 : "block specification 0x%08x", desc.fSpecification));
375 : }
376 0 : bHavePartitionData[index]=true;
377 0 : bHavePartitionData[index]=true;
378 0 : if (bHavePartitionRawData) {
379 0 : AliFatal(Form("inconsistent cluster data: both compressed and raw cluster blocks present in HLTOUT, indicates a failure "
380 : "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
381 : "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
382 : "block specification 0x%08x", desc.fSpecification));
383 : }
384 : bHavePartitionCompressedData=true;
385 : continue;
386 0 : } else if (desc.fDataType==AliHLTTPCDefinitions::ClusterTracksCompressedDataType()) {
387 0 : iResult=decoder.ReadTrackModelClustersCompressed(pContainer->BeginTrackModelClusterBlock(0),
388 0 : reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr),
389 0 : desc.fSize,
390 0 : desc.fSpecification);
391 0 : continue;
392 : }
393 0 : }
394 :
395 0 : pSystem->ReleaseHLTInput(pHLTInput);
396 :
397 0 : if (iResult<0) return iResult;
398 0 : return nExtractedClusters;
399 0 : }
400 :
401 : int AliHLTTPCDataPublisherComponent::DoInit( int argc, const char** argv )
402 : {
403 : /// inherited from AliHLTComponent: component initialisation and argument scan.
404 : int iResult=0;
405 :
406 : // component configuration
407 : //Stage 1: default initialization.
408 0 : const char* defaultArguments="-detector TPC -datatype 'DDL_RAW ' 'TPC ' -skipempty";
409 0 : if ((iResult = ConfigureFromArgumentString(1, &defaultArguments)) < 0)
410 0 : return iResult;
411 :
412 : //Stage 2: OCDB. - disabled
413 : //TString cdbPath("HLT/ConfigTPC/");
414 : //cdbPath += GetComponentID();
415 : //
416 : //iResult = ConfigureFromCDBTObjString(cdbPath);
417 : //if (iResult < 0)
418 : // return iResult;
419 :
420 : //Stage 3: command line arguments.
421 0 : if (argc && (iResult = ConfigureFromArgumentString(argc, argv)) < 0)
422 0 : return iResult;
423 0 : if ((iResult=AliHLTRawReaderPublisherComponent::DoInit(0, NULL))<0)
424 0 : return iResult;
425 :
426 0 : auto_ptr<AliRawClusterContainer> container(new AliRawClusterContainer);
427 0 : if (!container.get()) return -ENOMEM;
428 :
429 0 : fClusters=container.release();
430 :
431 0 : return iResult;
432 0 : }
433 :
434 : int AliHLTTPCDataPublisherComponent::DoDeinit()
435 : {
436 : /// inherited from AliHLTComponent: component cleanup
437 : int iResult=0;
438 :
439 0 : if (fpDecoder) delete fpDecoder;
440 0 : fpDecoder=NULL;
441 :
442 0 : return iResult;
443 : }
444 :
int AliHLTTPCDataPublisherComponent::ScanConfigurationArgument(int argc, const char** argv)
{
  /// inherited from AliHLTComponent: argument scan
  /// Recognized arguments:
  ///   -publish-raw all|filtered|off    control publishing of raw DDL blocks
  ///   -publish-clusters all|off        control publishing of decoded clusters
  /// Returns the number of consumed arguments (2), -EPROTO on an invalid
  /// parameter value, or forwards unrecognized arguments to the base class.
  if (argc<1) return 0;
  int bMissingParam=0;
  int i=0;
  TString argument=argv[i];

  do {
    // -publish-raw
    if (argument.CompareTo("-publish-raw")==0) {
      if ((bMissingParam=(++i>=argc))) break;
      TString parameter=argv[i];
      if (parameter.CompareTo("all")==0) {
        fMode|=kPublishRawAll;
        return 2;
      } else if (parameter.CompareTo("filtered")==0) {
        // 'filtered' implies registering cluster blocks so that IsSelected()
        // can suppress DDLs already covered by clusters
        fMode|=kPublishRawFiltered;
        fMode|=kRegisterClusterBlocks;
        fMode&=~kPublishRawAll;
        return 2;
      } else if (parameter.CompareTo("off")==0) {
        fMode&=~(kPublishRawAll|kPublishRawFiltered);
        return 2;
      } else {
        HLTError("invalid parameter for argument %s, expecting either 'all', 'filtered', or 'off' instead of %s", argument.Data(), parameter.Data());
        return -EPROTO;
      }
    }
    // -publish-clusters
    if (argument.CompareTo("-publish-clusters")==0) {
      if ((bMissingParam=(++i>=argc))) break;
      TString parameter=argv[i];
      if (parameter.CompareTo("all")==0) {
        fMode|=kPublishClustersAll;
        return 2;
      } else if (parameter.CompareTo("off")==0) {
        fMode&=~(kPublishClustersAll);
        return 2;
      } else {
        HLTError("invalid parameter for argument %s, expecting either 'all', or 'off' instead of %s", argument.Data(), parameter.Data());
        return -EPROTO;
      }
    }

  } while (0); // using do-while only to have break available

  // NOTE(review): bMissingParam is set on a recognized argument with missing
  // parameter, but the error is not reported here; the argument falls through
  // to the base class scan — confirm this is the intended behavior
  return AliHLTRawReaderPublisherComponent::ScanConfigurationArgument(argc, argv);
}
494 :
495 : int AliHLTTPCDataPublisherComponent::GetSpecificationFromEquipmentId(int id, AliHLTUInt32_t &specification) const
496 : {
497 : /// inherited from AliHLTRawReaderPublisherComponent: get specification
498 :
499 : // FIXME: add common functionality to AliHLTDAQ
500 : int partition;
501 : int slice;
502 0 : if (id < 840) {
503 0 : partition = id % 2;
504 0 : slice = (id - 768) / 2;
505 0 : } else {
506 0 : partition = (id % 4) + 2;
507 0 : slice = (id - 840) / 4;
508 : }
509 0 : specification=(slice<<24)|(slice<<16)|(partition<<8)|partition;
510 :
511 0 : return 0;
512 : }
513 :
bool AliHLTTPCDataPublisherComponent::IsSelected(int equipmentId) const
{
  /// inherited from AliHLTRawReaderPublisherComponent: check if a block is selected or not
  /// check if a raw data block needs to be published. This is the case if
  /// there is no corresponding compressed data, i.e. function returns
  /// only false if the block can be found in the cluster container
  if (CheckMode(kPublishRawAll))
    return true;
  if (!CheckMode(kPublishRawFiltered))
    return false;

  // without a container nothing can be filtered, publish everything
  if (!fClusters)
    return true;

  // detector index 3 — presumably TPC in the AliHLTDAQ numbering, TODO confirm
  int offset=AliHLTDAQ::DdlIDOffset(3);
  int count=AliHLTDAQ::NumberOfDdls(3);
  if (offset<0 || count<0)
    return true;
  // ids outside the TPC DDL range are always published
  if (equipmentId<offset)
    return true;
  equipmentId-=offset;
  if (equipmentId>=count)
    return true;
  // map the local DDL number to slice/partition: the first 72 DDLs serve the
  // two inner partitions (2 per slice), the remainder the four outer ones
  int slice=equipmentId<72?equipmentId/2:(equipmentId-72)/4;
  int partition=equipmentId<72?equipmentId%2:((equipmentId-72)%4)+2;
  AliHLTUInt32_t specification=AliHLTTPCDefinitions::EncodeDataSpecification(slice, slice, partition, partition);
  // suppress the DDL if a cluster block with the same specification exists
  for (AliHLTComponentBlockDataList::const_iterator i=fClusters->GetBlockDescriptors().begin();
       i!=fClusters->GetBlockDescriptors().end(); i++) {
    if (i->fSpecification==specification)
      return false;
  }
  return true;
}
547 :
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::AliRawClusterContainer()
  : AliHLTLogging()
  , fBlockCount(0)
  , fTotalClusterCount(0)
  , fBlockClusterCount(0)
  , fpBuffer(NULL)
  , fBufferSize(0)
  , fDescriptors()
  , fCurrentBlock(NULL)
  , fTrackModelClusters(NULL)
  , fTrackModelClusterMap()
  , fIterator()
  , fState(0)
{
  // constructor; the container starts without a target buffer, one is
  // attached later via SetTargetBuffer()
}
564 :
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::~AliRawClusterContainer()
{
  // destructor; the container does not own the target buffer, nothing to release
}
569 :
570 : int AliHLTTPCDataPublisherComponent::AliRawClusterContainer::SetTargetBuffer(AliHLTUInt8_t* pBuffer, int size)
571 : {
572 : // set/reset the external target buffer
573 0 : Clear();
574 0 : fpBuffer=pBuffer;
575 0 : fBufferSize=pBuffer?size:0;
576 0 : return 0;
577 : }
578 :
579 : int AliHLTTPCDataPublisherComponent::AliRawClusterContainer::Sort()
580 : {
581 : // merge track model clusters into partition cluster blocks
582 :
583 : // TODO: implement merging
584 : // decoding of track model clusters needs to be done after all
585 : // partition blocks have been decoded. The track model clusters are
586 : // then at the end of the target buffer and have to be sorted into the
587 : // other blocks
588 : // 1) move track model cluster block by its own size back in buffer
589 : // if not enough space, allocate temporary buffer and increase the
590 : // size estimator for the next event
591 : // 2) fill the index grid
592 : // 3) make appropriate gaps between the partition cluster blocks
593 : // 4) copy clusters into the partitions and update descriptors
594 0 : return -ENOSYS;
595 : }
596 :
597 : int AliHLTTPCDataPublisherComponent::AliRawClusterContainer::CopyBlockDescriptors(AliHLTComponentBlockDataList& target) const
598 : {
599 : // fill block descriptors of extracted partition cluster blocks to target list
600 0 : target.insert(target.begin(), fDescriptors.begin(), fDescriptors.end());
601 0 : return fDescriptors.size();
602 : }
603 :
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::BeginPartitionClusterBlock(int count, AliHLTUInt32_t specification)
{
  /// iterator of partition clusters block of specification
  /// opens a new block tagged with the compressed remaining-clusters data
  /// type and returns the fill iterator; fCurrentBlock receives the header
  return ClusterIterator(count, AliHLTTPCDefinitions::RemainingClustersCompressedDataType(), specification, fCurrentBlock);
}
609 :
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::BeginTrackModelClusterBlock(int count)
{
  /// iterator of track model clusters
  // 0x23000500: fixed specification used for the single track model cluster
  // block — presumably encodes the full slice/partition range, TODO confirm
  return ClusterIterator(count, AliHLTTPCDefinitions::ClusterTracksCompressedDataType(), 0x23000500, fTrackModelClusters);
}
615 :
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::ClusterIterator(int /*count*/, AliHLTComponentDataType dt, AliHLTUInt32_t specification, AliHLTTPCRawClusterData* &pData)
{
  /// open a new cluster block of the given data type and specification in the
  /// target buffer and return the fill iterator; pData is set to the block
  /// header, or left untouched if there is no room for the header
  fBlockCount++;
  // reset the member iterator: explicit destruction here, re-created via
  // placement new before each return below
  fIterator.~iterator();
  fCurrentBlock=NULL;
  fTrackModelClusters=NULL;
  fTrackModelClusterMap.clear();
  fBlockClusterCount=0;
  AliHLTUInt32_t filled=0;
  for (AliHLTComponentBlockDataList::const_iterator desc=fDescriptors.begin();
       desc!=fDescriptors.end(); desc++) {
    // accumulate the total size of all blocks written so far to find the
    // write position for the new block
    filled+=desc->fSize;
    if (desc->fSpecification==specification &&
        desc->fDataType==dt) {
      HLTFatal("partition cluster block with data type %s and specification 0x%08x has been already processed",
               AliHLTComponent::DataType2Text(dt).c_str(), specification);
      // force the no-space path below so no duplicate block is opened
      filled=fBufferSize;
    }
  }

  // insert an empty data block which is then updated during filling
  AliHLTComponentBlockData bd;
  AliHLTComponent::FillBlockData(bd);
  bd.fPtr=NULL;
  bd.fSize=0;
  bd.fOffset=filled;
  bd.fDataType=dt;
  bd.fSpecification=specification;
  fDescriptors.push_back(bd);

  // initialize only the header, during filling the cluster count of the header
  // and the block size will be incremented
  AliHLTUInt32_t blocksize=sizeof(AliHLTTPCRawClusterData);
  if (filled+blocksize>(unsigned)fBufferSize || fpBuffer==NULL) {
    // no target buffer or no room for the header: iterator without a slot,
    // NextCluster() will then report -ENOSPC via fState
    new (&fIterator) iterator(this);
    return fIterator;
  }
  pData=reinterpret_cast<AliHLTTPCRawClusterData*>(fpBuffer+filled);
  pData->fVersion=0;
  pData->fCount=0;
  fDescriptors.back().fSize=blocksize;
  new (&fIterator) iterator(this);
  return fIterator;
}
661 :
AliHLTTPCRawCluster* AliHLTTPCDataPublisherComponent::AliRawClusterContainer::NextCluster(int slice, int partition)
{
  /// increment to next cluster: returns a pointer to the next free cluster
  /// slot in the active block, or NULL if no block is active or the target
  /// buffer is exhausted (fState is set to -ENOSPC in that case)
  // NOTE(review): the counters are incremented before any validity check, so
  // they also count failed attempts — the -ENOSPC size estimate in GetEvent
  // appears to rely on the total count; confirm this is intended
  fTotalClusterCount++;
  fBlockClusterCount++;
  if (!fCurrentBlock && !fTrackModelClusters)
    return NULL;
  if (fDescriptors.size()==0)
    return NULL;
  // exactly one of the two block pointers is active at a time
  AliHLTTPCRawClusterData* data=fCurrentBlock?fCurrentBlock:fTrackModelClusters;
  if (int(fDescriptors.back().fOffset+fDescriptors.back().fSize+sizeof(AliHLTTPCRawCluster))>=fBufferSize) {
    fState=-ENOSPC;
    return NULL;
  }
  data->fCount++;
  fDescriptors.back().fSize+=sizeof(AliHLTTPCRawCluster);
  // track model clusters additionally record their slice/partition mapping
  if (fTrackModelClusters)
    fTrackModelClusterMap.push_back(AliHLTTPCSpacePointData::GetID(slice, partition, fBlockClusterCount));
  return data->fClusters+(data->fCount-1);
}
682 :
683 : void AliHLTTPCDataPublisherComponent::AliRawClusterContainer::Clear(Option_t * /*option*/)
684 : {
685 : /// internal cleanup
686 0 : fBlockCount=0;
687 0 : fTotalClusterCount=0;
688 0 : fBlockClusterCount=0;
689 0 : fpBuffer=NULL;
690 0 : fBufferSize=0;
691 0 : fCurrentBlock=NULL;
692 0 : fTrackModelClusters=NULL;
693 0 : fTrackModelClusterMap.clear();
694 0 : fDescriptors.clear();
695 0 : fState=0;
696 0 : }
697 :
void AliHLTTPCDataPublisherComponent::AliRawClusterContainer::Print(Option_t */*option*/) const
{
  /// print info; intentionally empty, nothing is printed yet
}
702 :
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator::Next(int slice, int partition)
{
  // increment iterator: acquire the next cluster slot from the container
  if (fContainer) {
    fCluster=fContainer->NextCluster(slice, partition);
    // zero-initialize the freshly acquired slot before it is filled
    if (fCluster) memset(fCluster, 0, sizeof(AliHLTTPCRawCluster));
  } else {
    fCluster=NULL;
  }
  //
  // set row offset for partitions 1-5
  // clusters are compressed with global row numbers 0-158,
  // in HLT they should have local row numbers within their partition
  //
  if( (partition>=0) && (partition<AliHLTTPCGeometry::GetNPatches()) ){
    fRowOffset = AliHLTTPCGeometry::GetFirstRow(partition);
  } else {
    fRowOffset = 0;
    HLTWarning("invalid partition number %d for decompressed clusters",partition);
  }
  return *this;
}
|