//-*****************************************************************************
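// Collect the Alembic sample times that fall within the current frame's shutter
// interval so the caller can emit one motion key per sample.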
void GetRelevantSampleTimes( ProcArgs &args, TimeSamplingPtr timeSampling,
                            size_t numSamples, SampleTimeSet &output )
{
    if ( numSamples < 2 )
    {
        output.insert( 0.0 );
        return;
    }

    chrono_t frameTime = args.frame / args.fps;

    chrono_t shutterOpenTime = ( args.frame + args.shutterOpen ) / args.fps;

    chrono_t shutterCloseTime = ( args.frame + args.shutterClose ) / args.fps;

    std::pair<index_t, chrono_t> shutterOpenFloor =
        timeSampling->getFloorIndex( shutterOpenTime, numSamples );

    std::pair<index_t, chrono_t> shutterCloseCeil =
        timeSampling->getCeilIndex( shutterCloseTime, numSamples );

    //TODO: what's a reasonable epsilon?
    static const chrono_t epsilon = 1.0 / 10000.0;

    //check whether the next sample is actually the floor we want,
    //accounting for floating-point slop
    //(first make sure we have at least two samples to work with)
    if ( shutterOpenFloor.first < shutterCloseCeil.first )
    {
        //if our open sample is less than open time,
        //look at the next index time
        if ( shutterOpenFloor.second < shutterOpenTime )
        {
            chrono_t nextSampleTime =
                     timeSampling->getSampleTime( shutterOpenFloor.first + 1 );

            if ( fabs( nextSampleTime - shutterOpenTime ) < epsilon )
            {
                shutterOpenFloor.first += 1;
                shutterOpenFloor.second = nextSampleTime;
            }
        }
    }


    for ( index_t i = shutterOpenFloor.first; i < shutterCloseCeil.first; ++i )
    {
        output.insert( timeSampling->getSampleTime( i ) );
    }

    //no samples fell inside the shutter range? insert the frame time and return
    if ( output.size() == 0 )
    {
        output.insert( frameTime );
        return;
    }

    chrono_t lastSample = *(output.rbegin() );

    //determine whether we need the extra sample at the end
    if ( ( fabs( lastSample - shutterCloseTime ) > epsilon )
         && lastSample < shutterCloseTime )
    {
        output.insert( shutterCloseCeil.second );
    }
}
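
//-*****************************************************************************
// Builds an Arnold "points" node from an Alembic IPoints primitive, optionally
// wrapped in a "ginstance" so identical point caches can share geometry.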
AtNode * ProcessPointsBase(
        IPoints & prim, ProcArgs & args,
        SampleTimeSet & sampleTimes,
        std::vector<AtPoint> & vidxs,
        std::vector<float> & radius,
        MatrixSampleMap * xformSamples )
{
    if ( !prim.valid() )
    {
        return NULL;
    }
    
    Alembic::AbcGeom::IPointsSchema  &ps = prim.getSchema();
    TimeSamplingPtr ts = ps.getTimeSampling();
    
    sampleTimes.insert( ts->getFloorIndex(args.frame / args.fps, ps.getNumSamples()).second );
    
    std::string name = args.nameprefix + prim.getFullName();
    
    AtNode * instanceNode = NULL;
    
    std::string cacheId;
    
    SampleTimeSet singleSampleTimes;
    singleSampleTimes.insert( ts->getFloorIndex(args.frame / args.fps, ps.getNumSamples()).second );

	ICompoundProperty arbGeomParams = ps.getArbGeomParams();
	ISampleSelector frameSelector( *singleSampleTimes.begin() );
	std::vector<std::string> tags;

	// read per-object tags stored as a JSON array in the "mtoa_constant_tags"
	// string parameter
	if ( arbGeomParams.valid() )
	{
		if (arbGeomParams.getPropertyHeader("mtoa_constant_tags") != NULL)
		{
			const PropertyHeader * tagsHeader = arbGeomParams.getPropertyHeader("mtoa_constant_tags");
			if (IStringGeomParam::matches( *tagsHeader ))
			{
				IStringGeomParam param( arbGeomParams,  "mtoa_constant_tags" );
				if ( param.valid() )
				{
					IStringGeomParam::prop_type::sample_ptr_type valueSample =
									param.getExpandedValue( frameSelector ).getVals();

					if ( param.getScope() == kConstantScope || param.getScope() == kUnknownScope)
					{
						Json::Value jtags;
						Json::Reader reader;
						if(reader.parse(valueSample->get()[0], jtags))
							for( Json::ValueIterator itr = jtags.begin() ; itr != jtags.end() ; itr++ )
							{
								tags.push_back(jtags[itr.key().asUInt()].asString());
							}
					}
				}
			}
		}
	}

    if ( args.makeInstance )
    {
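        // build a cache key from the relative sample times and the digests of the
        // position samples so identical point data can share one points node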
        std::ostringstream buffer;
        AbcA::ArraySampleKey sampleKey;
        
        
        for ( SampleTimeSet::iterator I = sampleTimes.begin();
                I != sampleTimes.end(); ++I )
        {
            ISampleSelector sampleSelector( *I );
            ps.getPositionsProperty().getKey(sampleKey, sampleSelector);
            
            buffer << GetRelativeSampleTime( args, (*I) ) << ":";
            sampleKey.digest.print(buffer);
            buffer << ":";
        }
        
        cacheId = buffer.str();
        
        instanceNode = AiNode( "ginstance" );
        AiNodeSetStr( instanceNode, "name", name.c_str() );
		args.createdNodes.push_back(instanceNode);

        if ( args.proceduralNode )
        {
            AiNodeSetByte( instanceNode, "visibility",
                    AiNodeGetByte( args.proceduralNode, "visibility" ) );
        
        }
        else
        {
            AiNodeSetByte( instanceNode, "visibility", AI_RAY_ALL );
        }

		ApplyTransformation( instanceNode, xformSamples, args );

		NodeCache::iterator I = g_meshCache.find(cacheId);

		// parameter overrides
		if(args.linkOverride)
			ApplyOverrides(name, instanceNode, tags, args);

		// shader assignment
		if (nodeHasParameter( instanceNode, "shader" ) )
		{
			if(args.linkShader)
			{
				ApplyShaders(name, instanceNode, tags, args);
			}
			else
			{
				AtArray* shaders = AiNodeGetArray(args.proceduralNode, "shader");
				if (shaders != NULL && shaders->nelements != 0)
					AiNodeSetArray(instanceNode, "shader", AiArrayCopy(shaders));
			}
		}

        if ( I != g_meshCache.end() )
        {
            // cache hit: an identical points node already exists, so reuse it as
            // the instance source and skip rebuilding the geometry
            AiNodeSetPtr(instanceNode, "node", (*I).second );
            return NULL;
        }
    }
    

    bool isFirstSample = true;

	// default point radius, optionally overridden by a "radiusPoint" user
	// parameter on the procedural node
	float radiusPoint = 0.1f;
	if (AiNodeLookUpUserParameter(args.proceduralNode, "radiusPoint") != NULL)
		radiusPoint = AiNodeGetFlt(args.proceduralNode, "radiusPoint");

	bool useVelocities = false;
	if ((sampleTimes.size() == 1) && (args.shutterOpen != args.shutterClose))
	{
		// only one sample is available but motion blur is required; fall back
		// to the velocities property if it exists
		if (ps.getVelocitiesProperty().valid())
			useVelocities = true;
	}

	for ( SampleTimeSet::iterator I = sampleTimes.begin();
          I != sampleTimes.end(); ++I, isFirstSample = false)
    {
        ISampleSelector sampleSelector( *I );
        Alembic::AbcGeom::IPointsSchema::Sample sample = ps.getValue( sampleSelector );

		Alembic::Abc::P3fArraySamplePtr v3ptr = sample.getPositions();
		size_t pSize = v3ptr->size();

		if(useVelocities && isFirstSample)
		{
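			// a single Alembic sample with motion blur: use the stored velocities
			// to extrapolate two position keys one frame apart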
			float scaleVelocity = 1.0f;
			if (AiNodeLookUpUserParameter(args.proceduralNode, "scaleVelocity") !=NULL )
				scaleVelocity = AiNodeGetFlt(args.proceduralNode, "scaleVelocity");

			vidxs.resize(pSize*2);
			Alembic::Abc::V3fArraySamplePtr velptr = sample.getVelocities();

			float timeoffset = ((args.frame / args.fps) - ts->getFloorIndex((*I), ps.getNumSamples()).second) * args.fps;

			for ( size_t pId = 0; pId < pSize; ++pId ) 
			{
				Alembic::Abc::V3f posAtOpen = ((*v3ptr)[pId] + (*velptr)[pId] * scaleVelocity *-timeoffset);			
				AtPoint pos1;
				pos1.x = posAtOpen.x;
				pos1.y = posAtOpen.y;
				pos1.z = posAtOpen.z;
				vidxs[pId]= pos1;

				Alembic::Abc::V3f posAtEnd = ((*v3ptr)[pId] + (*velptr)[pId]* scaleVelocity *(1.0f-timeoffset));
				AtPoint pos2;
				pos2.x = posAtEnd.x;
				pos2.y = posAtEnd.y;
				pos2.z = posAtEnd.z;
				vidxs[pId+pSize]= pos2;
				
				radius.push_back(radiusPoint);	
			}
		}
		else
		{
			// no motion blur required, or the particles already have one
			// position sample per motion key
			for ( size_t pId = 0; pId < pSize; ++pId ) 
			{
				AtPoint pos;
				pos.x = (*v3ptr)[pId].x;
				pos.y = (*v3ptr)[pId].y;
				pos.z = (*v3ptr)[pId].z;
				vidxs.push_back(pos);
				radius.push_back(radiusPoint);
			}
		}
	}
    
    AtNode* pointsNode = AiNode( "points" );
    
    if (!pointsNode)
    {
        AiMsgError("Failed to make points node for %s",
                prim.getFullName().c_str());
        return NULL;
    }
    

    args.createdNodes.push_back(pointsNode);
    if ( instanceNode != NULL)
    {
        AiNodeSetStr( pointsNode, "name", (name + ":src").c_str() );
    }
    else
    {
        AiNodeSetStr( pointsNode, "name", name.c_str() );
    }
    
    if(!useVelocities)
	{
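		// without velocities there is one motion key per Alembic sample;
		// positions and radii are laid out contiguously per key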
		AiNodeSetArray(pointsNode, "points",
				AiArrayConvert( vidxs.size() / sampleTimes.size(), 
						sampleTimes.size(), AI_TYPE_POINT, (void*)(&(vidxs[0]))
								));
		AiNodeSetArray(pointsNode, "radius",
				AiArrayConvert( vidxs.size() / sampleTimes.size(), 
						sampleTimes.size(), AI_TYPE_FLOAT, (void*)(&(radius[0]))
								));

		if ( sampleTimes.size() > 1 )
		{
			std::vector<float> relativeSampleTimes;
			relativeSampleTimes.reserve( sampleTimes.size() );
        
			for (SampleTimeSet::const_iterator I = sampleTimes.begin();
					I != sampleTimes.end(); ++I )
			{
			   chrono_t sampleTime = GetRelativeSampleTime( args, (*I) );

				relativeSampleTimes.push_back(sampleTime);
                    
			}
        
			AiNodeSetArray( pointsNode, "deform_time_samples",
					AiArrayConvert(relativeSampleTimes.size(), 1,
							AI_TYPE_FLOAT, &relativeSampleTimes[0]));
		}
	}
	else
	{
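		// with velocities there are exactly two extrapolated position keys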
		AiNodeSetArray(pointsNode, "points",
				AiArrayConvert( vidxs.size() / 2, 
						2, AI_TYPE_POINT, (void*)(&(vidxs[0]))
								));
		AiNodeSetArray(pointsNode, "radius",
				AiArrayConvert( vidxs.size() /2 / sampleTimes.size(), 
						sampleTimes.size(), AI_TYPE_FLOAT, (void*)(&(radius[0]))
								));		
		
		AiNodeSetArray( pointsNode, "deform_time_samples",
					AiArray(2, 1, AI_TYPE_FLOAT, 0.f, 1.f));

	}

    AddArbitraryGeomParams( arbGeomParams, frameSelector, pointsNode );
    
    // standalone case: apply the transform and return the points node directly
    if ( instanceNode == NULL )
    {
        if ( xformSamples )
        {
            ApplyTransformation( pointsNode, xformSamples, args );
        }

        return pointsNode;
    }
    else
    {
        // instanced case: hide the source points node, reference it from the
        // ginstance, and cache it so later instances can reuse it
        AiNodeSetByte( pointsNode, "visibility", 0 );
        AiNodeSetInt( pointsNode, "mode", 1 );

        AiNodeSetPtr(instanceNode, "node", pointsNode );
        g_meshCache[cacheId] = pointsNode;
        return pointsNode;
    }
    
}