// @(#) $Id: AliHLTTPCCANeighboursCleaner.cxx,v 1.11 2010/08/27 22:30:47 ikulakov Exp $
// **************************************************************************
// This file is property of and copyright by the ALICE HLT Project          *
// ALICE Experiment at CERN, All rights reserved.                           *
//                                                                          *
// Primary Authors: Sergey Gorbunov                                         *
//                  Ivan Kisel                                              *
//                  for The ALICE HLT Project.                              *
//                                                                          *
// Developed by:   Igor Kulakov                                             *
//                 Maksym Zyzak                                             *
//                                                                          *
// Permission to use, copy, modify and distribute this software and its     *
// documentation strictly for non-commercial purposes is hereby granted     *
// without fee, provided that the above copyright notice appears in all     *
// copies and that both the copyright notice and this permission notice     *
// appear in the supporting documentation. The authors make no claims       *
// about the suitability of this software for any purpose. It is            *
// provided "as is" without express or implied warranty.                    *
//                                                                          *
//***************************************************************************

#define USE_ARBB
//#define USE_CALL

#include "AliHLTTPCCANeighboursCleaner.h"
#include "AliHLTTPCCAMath.h"
#include "AliHLTTPCCATracker.h"
#include "AliHLTTPCCADef.h"

// NOTE(review): the targets of the two angle-bracket includes below were stripped
// when this file was extracted; they are reconstructed from usage (arbb::dense,
// add_scan, gather/scatter/select; VALGRIND_CHECK_VALUE_IS_DEFINED) -- confirm against VCS.
#include <arbb.hpp>
using namespace arbb;

#ifndef NVALGRIND
#include <valgrind/memcheck.h>
#endif

#define USE_EDGE_HITS // use edge links, which are not reciprocal
// #define SELECT_EDGE_HITS // select edge links before use.
otherwise all not reciprocall links will be used // * // * kill link to the neighbour if the neighbour is not pointed to the hit // * void AliHLTTPCCANeighboursCleaner::CheckLinksOnRows( dense& rowLinkUpData, dense& rowLinkDnData, dense rowHitDataIsUsed ) { // TODO first\last row is treated in the same way, but it has no down\up links #if 0 const usize taskSize = rowLinkUpData.length(); const dense indices = arbb::indices(0, taskSize, 1); const dense valid = (0 == rowHitDataIsUsed); // -- get connected hits const dense upIndices = select(valid, rowLinkUpData, -1); const dense dnIndices = select(valid, rowLinkDnData, -1); // -- check connections const dense validUp = (upIndices >= 0); const dense validDn = (dnIndices >= 0); dense dnFromUpIndices = gather( rowLinkDnData, upIndices, -1); dense upFromDnIndices = gather( rowLinkUpData, dnIndices, -1); // dnFromUpIndices = select(validUp, dnFromUpIndices, -1); // does it make gather faster?? // upFromDnIndices = select(validDn, upFromDnIndices, -1); const dense recUpLinks = (dnFromUpIndices == indices); // reciprocal links const dense recDnLinks = (upFromDnIndices == indices); // -- some one-way(1W) connections can be made reciprocal dense good1WUpLinks = validUp && (dnFromUpIndices == -1) && validDn && recDnLinks; dense good1WDnLinks = validDn && (upFromDnIndices == -1) && validUp && recUpLinks; // -- find first estimation which connections to be removed dense bad1WUpLinks = validUp && !recUpLinks && !good1WUpLinks; dense bad1WDnLinks = validDn && !recDnLinks && !good1WDnLinks; for( int iter = 0; iter < 1; iter++ ) { // almost no difference between 1 and 2 // -- some one-way connections can be made reciprocal const dense bad1WUpFromDnLinks = gather( bad1WUpLinks, dnIndices, false ); const dense bad1WDnFromUpLinks = gather( bad1WDnLinks, upIndices, false ); const dense recDnFromDnLinks = gather( recDnLinks, dnIndices, false ); const dense recUpFromUpLinks = gather( recUpLinks, upIndices, false ); good1WUpLinks = validUp 
&& (dnFromUpIndices == -1 || bad1WDnFromUpLinks) && validDn && recDnLinks; good1WDnLinks = validDn && (upFromDnIndices == -1 || bad1WUpFromDnLinks) && validUp && recUpLinks; good1WUpLinks = good1WUpLinks && !recDnFromDnLinks; // do not merge chains good1WDnLinks = good1WDnLinks && !recDnFromDnLinks; // -- find next estimation which connections to be removed bad1WUpLinks = validUp && !recUpLinks && !good1WUpLinks; bad1WDnLinks = validDn && !recDnLinks && !good1WDnLinks; } // -- make some one-way connections reciprocal rowLinkUpData = scatter( indices, dnIndices, rowLinkUpData, good1WDnLinks ); rowLinkDnData = scatter( indices, upIndices, rowLinkDnData, good1WUpLinks ); // -- remove no reciprocal connections rowLinkUpData = select(bad1WUpLinks, -1, rowLinkUpData); rowLinkDnData = select(bad1WDnLinks, -1, rowLinkDnData); #elif 1 const usize taskSize = rowLinkUpData.length(); const dense indices = arbb::indices(0, taskSize, 1); const dense valid = (0 == rowHitDataIsUsed); _for( boolean isBad = true, isBad, ) { // -- get connected hits const dense upIndices = select(valid, rowLinkUpData, -1); const dense dnIndices = select(valid, rowLinkDnData, -1); // -- check connections const dense validUp = (upIndices >= 0); const dense validDn = (dnIndices >= 0); dense dnFromUpIndices = gather( rowLinkDnData, upIndices, -1); dense upFromDnIndices = gather( rowLinkUpData, dnIndices, -1); // dnFromUpIndices = select(validUp, dnFromUpIndices, -1); // does it make gather faster?? 
// upFromDnIndices = select(validDn, upFromDnIndices, -1); const dense recUpLinks = (dnFromUpIndices == indices); // reciprocal links const dense recDnLinks = (upFromDnIndices == indices); // -- some one-way connections can be made reciprocal const dense recDnFromUpLinks = gather( recDnLinks, upIndices, false ); const dense recUpFromDnLinks = gather( recUpLinks, dnIndices, false ); dense good1WUpLinks = validUp && !recDnFromUpLinks && validDn && recDnLinks; dense good1WDnLinks = validDn && !recUpFromDnLinks && validUp && recUpLinks; // // do not merge chains // const dense recDnFromDnLinks = gather( recDnLinks, dnIndices, false ); // const dense recUpFromUpLinks = gather( recUpLinks, upIndices, false ); // good1WUpLinks = good1WUpLinks && !recDnFromDnLinks; // good1WDnLinks = good1WDnLinks && !recDnFromDnLinks; // -- find next estimation which connections to be removed const dense bad1WUpLinks = validUp && !recUpLinks && !good1WUpLinks; const dense bad1WDnLinks = validDn && !recDnLinks && !good1WDnLinks; // -- remove no reciprocal connections rowLinkUpData = select(bad1WUpLinks, -1, rowLinkUpData); rowLinkDnData = select(bad1WDnLinks, -1, rowLinkDnData); isBad = any( bad1WUpLinks || bad1WDnLinks ); _if( !isBad ) { // -- make some one-way connections reciprocal rowLinkUpData = scatter( indices, dnIndices, rowLinkUpData, good1WDnLinks ); rowLinkDnData = scatter( indices, upIndices, rowLinkDnData, good1WUpLinks ); } _end_if; } _end_for; #endif // 0 // TODO SELECT_EDGE_HITS } void AliHLTTPCCANeighboursCleaner::run() { sfloat_v X,Y,Z,Xup,Yup,Zup,Xdown,Ydown,Zdown, X4,Y4,Z4; sfloat_v Yx1, Yx2, Yxx1, Yxx2, Yxxx, Zx1, Zx2, Zxx1, Zxx2, Zxxx, iX; const short_v minusOne = -1; #ifdef USE_ARBB const int numberOfRows = AliHLTTPCCAParameters::NumberOfRows; // get global indices (links) from local dense< THitI_ArBB > rowLinkUpData, rowLinkDnData; dense< i16 > rowHitDataIsUsed; AliHLTTPCCARowHit::TDense<1> rows = fRows_ArBB.flatten(); const dense< THitI_ArBB > rowNHits = 
fRows_ArBB.lengths(); const dense< THitI_ArBB > rowsShift = add_scan( rowNHits ); const dense< THitI_ArBB > rowLinkUpDataShift = repeat( rowsShift + rowNHits , rowNHits ); const dense< THitI_ArBB > rowLinkDnDataShift = repeat( rowsShift - shift(rowNHits,1,0), rowNHits ); rowLinkUpData = select( rows.LinkUp() != -1, rows.LinkUp() + rowLinkUpDataShift, -1 ); rowLinkDnData = select( rows.LinkUp() != -1, rows.LinkDn() + rowLinkDnDataShift, -1 ); rowHitDataIsUsed = rows.IsUsed(); CheckLinksOnRows( rowLinkUpData, rowLinkDnData, rowHitDataIsUsed ); // get local indices (links) from global rowLinkUpData = select( rowLinkUpData != -1, rowLinkUpData - rowLinkUpDataShift, -1 ); rowLinkDnData = select( rowLinkDnData != -1, rowLinkDnData - rowLinkDnDataShift, -1 ); rows.SetLinkUp( rowLinkUpData ); rows.SetLinkDn( rowLinkDnData ); fRows_ArBB = reshape_nested_lengths_my( rows, rowNHits ); #else // USE_ARBB const int rowStep = AliHLTTPCCAParameters::RowStep; const int beginRowIndex = rowStep; const int endRowIndex = numberOfRows - rowStep; for ( int rowIndex = beginRowIndex; rowIndex < endRowIndex; ++rowIndex ) { const AliHLTTPCCARow &row = data.Row( rowIndex ); const AliHLTTPCCARow &rowUp = data.Row( rowIndex + rowStep ); const AliHLTTPCCARow &rowDown = data.Row( rowIndex - rowStep ); const int numberOfHits = row.NHits(); // - look at up link, if it's valid but the down link in the row above doesn't link to us remove // the link // - look at down link, if it's valid but the up link in the row below doesn't link to us remove // the link for ( int hitIndex = 0; hitIndex < numberOfHits; hitIndex += short_v::Size ) { const ushort_v hitIndexes = ushort_v( Vc::IndexesFromZero ) + hitIndex; short_m validHitsMask = hitIndexes < numberOfHits; assert( ( validHitsMask && ((hitIndexes >= 0 ) && (hitIndexes < row.NHits() )) ) == validHitsMask ); validHitsMask &= ( short_v(data.HitDataIsUsed( row ), hitIndexes, validHitsMask ) == short_v( Vc::Zero ) ); // not-used hits can be connected only 
with not-used, so only one check is needed // collect information // up part assert( ( validHitsMask && ((hitIndexes >= 0 ) && (hitIndexes < row.NHits() )) ) == validHitsMask ); const short_v up = short_v(data.HitLinkUpData( row ), hitIndexes, validHitsMask ); VALGRIND_CHECK_VALUE_IS_DEFINED( up ); const ushort_v upIndexes = up.staticCast(); assert ( (validHitsMask && (up >= minusOne) ) == validHitsMask ); short_m upMask = validHitsMask && up >= short_v( Vc::Zero ); assert( ( upMask && ((upIndexes >= 0 ) && (upIndexes < rowUp.NHits() )) ) == upMask ); short_v downFromUp = short_v(data.HitLinkDownData( rowUp ), upIndexes, upMask ); // down part const short_v dn = short_v(data.HitLinkDownData( row ), hitIndexes, validHitsMask ); assert ( ( validHitsMask && (dn >= minusOne) ) == validHitsMask ); VALGRIND_CHECK_VALUE_IS_DEFINED( dn ); const ushort_v downIndexes = dn.staticCast(); short_m dnMask = validHitsMask && dn >= short_v( Vc::Zero ); assert( ( dnMask && ((downIndexes >= 0 ) && (downIndexes < rowDown.NHits() )) ) == dnMask ); short_v upFromDown = short_v(data.HitLinkUpData( rowDown ), downIndexes, dnMask ); // -- make clean -- // check if some one-way links can be good #ifdef USE_EDGE_HITS // std::cout << "downFromUp " << downFromUp << " upFromDown " << upFromDown << " validHitsMask " << validHitsMask << " dnMask " << dnMask << " upMask " << upMask << std::endl; // IKu debug upMask &= (downFromUp == -1) && (upFromDown == static_cast(hitIndexes)); // have mutual link only downwards. is up-link good? dnMask &= (upFromDown == -1) && (downFromUp == static_cast(hitIndexes)); // have mutual link only upwards. is down-link good? upMask &= dn >= short_v( Vc::Zero ); dnMask &= up >= short_v( Vc::Zero ); #ifdef SELECT_EDGE_HITS dnMask &= short_m(rowIndex + 2*rowStep < numberOfRows); // will use up & upup hits for check. 
If no one there then have to delete link upMask &= short_m(rowIndex - 2*rowStep >= 0); const short_m upOrDnMask = upMask || dnMask; if(!upOrDnMask.isEmpty()) // TODO do we need edge links - investigate { X = data.RowX( rowIndex ); assert( ( upOrDnMask && ((hitIndexes >= 0 ) && (hitIndexes < row.NHits() )) ) == upOrDnMask ); Y.gather( data.HitDataY( row ), hitIndexes, static_cast(upOrDnMask) );//HitDataY( row, hitIndex ); Z.gather( data.HitDataZ( row ), hitIndexes, static_cast(upOrDnMask) );// data.HitDataZ( row, hitIndex ); if(!dnMask.isEmpty()) { Xdown = data.RowX( rowIndex - rowStep ); assert( ( dnMask && ((downIndexes >= 0 ) && (downIndexes < rowDown.NHits() )) ) == dnMask ); Ydown.gather( data.HitDataY( rowDown ), downIndexes, static_cast(dnMask) ); Zdown.gather( data.HitDataZ( rowDown ), downIndexes, static_cast(dnMask) ); const AliHLTTPCCARow &rowUpUp = data.Row( rowIndex + 2*rowStep ); assert( ( dnMask && ((upIndexes >= 0 ) && (upIndexes < rowUp.NHits() )) ) == dnMask ); const short_v upup = short_v(data.HitLinkUpData( rowUp ), upIndexes, dnMask ); dnMask &= upup >= short_v( Vc::Zero ); // can't check, so can't save any other link // short_v downFromUpUp = short_v(data.HitLinkDownData( rowUpUp ), static_cast(upupIndexes), dnMask ); // dnMask &= downFromUpUp != -1; X4 = data.RowX( rowIndex + 2*rowStep ); const ushort_v upupIndexes = upup.staticCast(); ASSERT( ( dnMask && (upupIndexes < rowUpUp.NHits() ) ) == dnMask, " dnMask= " << dnMask << " upupIndexes= " << upupIndexes << " rowUpUp.NHits()= "<< rowUpUp.NHits() ); Y4.gather( data.HitDataY( rowUpUp ), upupIndexes, static_cast(dnMask) ); Z4.gather( data.HitDataZ( rowUpUp ), upupIndexes, static_cast(dnMask) ); iX = Vc::One/(X - X4); Yx1 = (Y - Y4)*iX; Yx2 = Y - X*Yx1; Yxx2 = Yx2 + Yx1*Xdown; Zx1 = (Z - Z4)*iX; Zx2 = Z - X*Zx1; Zxx2 = Zx2 + Zx1*Xdown; sfloat_v err2Y, err2Z; param.GetClusterErrors2(upIndexes,Xdown,Ydown,Zdown,err2Y, err2Z); // sfloat_v ch = CAMath::Abs((Yxx2 - 
Ydown)/CAMath::Sqrt(err2Y))+CAMath::Abs((Zxx2 - Zdown)/CAMath::Sqrt(err2Z)); // upMask &= static_cast(ch < 50.f); dnMask &= static_cast(CAMath::Abs((Yxx2 - Ydown)*CAMath::RSqrt(err2Y)) < 50.f); // dnMask &= static_cast(CAMath::Abs((Zxx2 - Zdown)/CAMath::Sqrt(err2Z)) < 80.f); } if(!upMask.isEmpty()) { Xup = data.RowX( rowIndex + rowStep ); assert( ( upMask && ((upIndexes >= 0 ) && (upIndexes < rowUp.NHits() )) ) == upMask ); Yup.gather( data.HitDataY( rowUp ), upIndexes, static_cast(upMask) ); Zup.gather( data.HitDataZ( rowUp ), upIndexes, static_cast(upMask) ); const AliHLTTPCCARow &rowDownDown = data.Row( rowIndex - 2*rowStep ); assert( ( upMask && ((downIndexes >= 0 ) && (downIndexes < rowDown.NHits() )) ) == upMask ); const short_v downdown = short_v(data.HitLinkDownData( rowDown ), downIndexes, upMask ); upMask &= downdown >= short_v( Vc::Zero ); // can't check, so can't save any other link // short_v upFromDownDown = short_v(data.HitLinkUpData( rowDownDown ), static_cast(downdownIndexes), upMask ); // upMask &= upFromDownDown != -1; ASSERT( ( upMask && (downdown >= 0 ) && (downdown < rowDownDown.NHits()) ) == upMask, " upMask= " << upMask << " dn= " << dn << " downdown= " << downdown << " row= " << rowIndex << " rowDownDown.NHits= " << rowDownDown.NHits()); X4 = data.RowX( rowIndex - 2*rowStep ); const ushort_v downdownIndexes = downdown.staticCast(); Y4.gather( data.HitDataY( rowDownDown ), downdownIndexes, static_cast(upMask) ); Z4.gather( data.HitDataZ( rowDownDown ), downdownIndexes, static_cast(upMask) ); iX = Vc::One/(X - X4); Yx1 = (Y - Y4)*iX; Yx2 = Y - X*Yx1; Yxx2 = Yx2 + Yx1*Xup; Zx1 = (Z - Z4)*iX; Zx2 = Z - X*Zx1; Zxx2 = Zx2 + Zx1*Xup; sfloat_v err2Y, err2Z; param.GetClusterErrors2(upIndexes,Xup,Yup,Zup,err2Y, err2Z); //std::cout << upMask << " "<(ch < 50.f); // upMask &= static_cast(CAMath::Abs((Yxx2 - Yup)/CAMath::Sqrt(err2Y)) < 50.f); // upMask &= static_cast(CAMath::Abs((Zxx2 - Zup)/CAMath::Sqrt(err2Z)) < 80.f); // std::cout << upMask << " "<= 0 
) && (upIndexes < rowUp.NHits() )) ) == upMask ); assert( ( dnMask && ((downIndexes >= 0 ) && (downIndexes < rowDown.NHits() )) ) == dnMask ); // make from good one-way links mutual links downFromUp( upMask ) = static_cast(hitIndexes); upFromDown( dnMask ) = static_cast(hitIndexes); data.SetHitLinkDownData( rowUp, upIndexes, downFromUp, upMask ); data.SetHitLinkUpData( rowDown, downIndexes, upFromDown, dnMask ); // data.SetHitLinkDownData( rowUp, upIndexes, static_cast(hitIndexes), upMask ); // data.SetHitLinkUpData( rowDown, downIndexes, static_cast(hitIndexes), dnMask ); #endif // USE_EDGE_HITS // delete one-way links (all other one-way links) const short_m badUpMask = validHitsMask && (downFromUp != static_cast(hitIndexes)); VALGRIND_CHECK_VALUE_IS_DEFINED( up ); assert( ( badUpMask && ((hitIndexes >= 0 ) && (hitIndexes < row.NHits() )) ) == badUpMask ); data.SetHitLinkUpData( row, hitIndexes, minusOne, badUpMask ); const short_m badDnMask = validHitsMask && (upFromDown != static_cast(hitIndexes)); VALGRIND_CHECK_VALUE_IS_DEFINED( dn ); assert( ( badDnMask && ((hitIndexes >= 0 ) && (hitIndexes < row.NHits() )) ) == badDnMask ); data.SetHitLinkDownData( row, hitIndexes, minusOne, badDnMask ); } // for iHit } // for iRow #endif // USE_ARBB }