tmp<volScalarField> constantRadiation::Shs()
{
    tmp<volScalarField> tShs
    (
        volScalarField::New
        (
            typeName + ":Shs",
            film().regionMesh(),
            dimensionedScalar(dimMass/pow3(dimTime), 0)
        )
    );

    const scalar time = film().time().value();

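    // Apply the constant radiative heat source only inside the configured time window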
    if ((time >= timeStart_) && (time <= timeStart_ + duration_))
    {
        scalarField& Shs = tShs.ref();
        const scalarField& qr = qrConst_;
        const scalarField& alpha = filmModel_.alpha();

        Shs = mask_*qr*alpha*absorptivity_;
    }

    return tShs;
}
Example #2
void ossimBuckeyeSensor::imagingRay(const ossimDpt& image_point,
	ossimEcefRay&   image_ray) const
{
	if(traceDebug()) ossimNotify(ossimNotifyLevel_DEBUG) << "ossimBuckeyeSensor::imagingRay: ..... entered" << std::endl;

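	// Offset from the reference image point, scale to physical film units,
	// and flip y to move from image coordinates to film (focal-plane) coordinates.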
	ossimDpt f1 ((image_point) - theRefImgPt);
	f1.x *= thePixelSize.x;
	f1.y *= -thePixelSize.y;
	ossimDpt film (f1 - thePrincipalPoint);

	if(traceDebug())
	{
		ossimNotify(ossimNotifyLevel_DEBUG) << "pixel size   = " << thePixelSize << std::endl;
		ossimNotify(ossimNotifyLevel_DEBUG) << "principal pt = " << thePrincipalPoint << std::endl;
		ossimNotify(ossimNotifyLevel_DEBUG) << "film pt      = " << film << std::endl;
	}

	if (theLensDistortion.valid())
	{
		ossimDpt filmOut;
		theLensDistortion->undistort(film, filmOut);
		film = filmOut;
	}

	ossimColumnVector3d cam_ray_dir (film.x,
		film.y,
		-theFocalLength);
	ossimEcefVector     ecf_ray_dir (theCompositeMatrix*cam_ray_dir);
	ecf_ray_dir = ecf_ray_dir*(1.0/ecf_ray_dir.magnitude());

	image_ray.setOrigin(theAdjEcefPlatformPosition);
	image_ray.setDirection(ecf_ray_dir);

	if(traceDebug()) ossimNotify(ossimNotifyLevel_DEBUG) << "ossimBuckeyeSensor::imagingRay: ..... leaving" << std::endl;
}
void ossimSpectraboticsRedEdgeModel::imagingRay(const ossimDpt& image_point,
                                    ossimEcefRay&   image_ray) const
{
    if(traceDebug())
    {
       ossimNotify(ossimNotifyLevel_DEBUG) << "ossimSpectraboticsRedEdgeModel::imagingRay: ..... entered" << std::endl;
    }
    ossimDpt film (image_point.x-m_calibratedCenter.x,
                   m_calibratedCenter.y - image_point.y); //- theRefImgPt);
//    ossimDpt film (image_point-m_calibratedCenter); //- theRefImgPt);
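    // Remove lens distortion in normalized coordinates: scale by 1/m_norm,
    // apply the undistortion model, then scale back.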
    if(m_lensDistortion.valid())
    {
      ossimDpt tempFilm(film.x/m_norm, film.y/m_norm);
      ossimDpt filmOut;
      m_lensDistortion->undistort(tempFilm, filmOut);
      film.x = filmOut.x*m_norm;
      film.y = filmOut.y*m_norm;
    }
    film.x *= m_pixelSize.x; // pixel size on the film
    film.y *= m_pixelSize.y; // pixel size on the film
    ossimColumnVector3d cam_ray_dir (film.x,
                                     film.y,
                                     -m_focalLength);
    ossimEcefVector     ecf_ray_dir (m_compositeMatrix*cam_ray_dir);
    ecf_ray_dir = ecf_ray_dir*(1.0/ecf_ray_dir.magnitude());

    image_ray.setOrigin(m_adjEcefPlatformPosition);
    image_ray.setDirection(ecf_ray_dir);
}
Example #4
double FilmMinimizerTM::func(const gsl_vector * x, void * params)
{
	double ret=100; int status;
	FilmFuncParams* p=(FilmFuncParams*)params;
	double &n1=p->n1, &n3=p->n3, &k=p->k, *bettaexp=p->bettaexp;
	FilmParams film(x);

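	// Solve the TM dispersion equation for this trial film (index film.n,
	// thickness film.H) and score consecutive windows of computed mode
	// constants against the measured bettaexp values, keeping the best match.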
	DispEqTMSolver Solver(DispEqTMFuncParams(n1, film.n, n3, k*film.H));
	if( (status=Solver.Run(n3,film.n, 1e-6)) ==GSL_SUCCESS) 
	{
		int i,j,roots_n=Solver.roots.GetSize(),betta_n=Solver.min_roots; double cur_ret;
		for(i=0;i<=roots_n-betta_n;i++)
		{
			cur_ret=0;			
			for(j=0;j<betta_n;j++)
			{
				cur_ret+=abs(Solver.roots[j+i]-bettaexp[j]);
			}
			if(cur_ret<ret) 
			{
				ret=cur_ret;
				p->betta_teor.RemoveAll(); betta_info t;
				for(j=0;j<betta_n;j++)
				{
					t.val=Solver.roots[j+i]; t.n=j+i; p->betta_teor.Add(t);
				}
			}
		}
	}
	func_call_cntr+=DispEqTMSolver::func_call_cntr;
	return ret;
}
Example #5
int Scene::render() {
	RayTracer raytracer;
	Camera camera(options);
	Film film(options);
	Sampler sampler(options);
	Sample sample;
	Ray ray;
	Color color;
	while(sampler.generate_sample(sample)) {
		camera.generate_ray(sample, ray);
		raytracer.trace(options, ray, color, options.maxdepth);
		film.commit(sample, color);
	}
	return film.write_image();
}
Example #6
void Scene::render() {
    Sampler sampler(this->width, this->height);
    Film film(this->width, this->height);
    Sample sample;
    Ray ray;
    Vector color;

    while(sampler.getSample(&sample)) {
        this->camera.generateRay(sample, &ray);
        this->tracer.trace(ray, &color);
        film.commit(sample, color);
    }
    film.writeImage(this->outputName);
    film.cleanUp();
}
void ossimSpectraboticsRedEdgeModel::worldToLineSample(const ossimGpt& world_point,
                                           ossimDpt&       image_point) const
{
   #if 0
   if((theBoundGndPolygon.getNumberOfVertices() > 0)&&
      (!theBoundGndPolygon.hasNans()))
   {
      if (!(theBoundGndPolygon.pointWithin(world_point)))
      {
//         image_point.makeNan();
//          image_point = extrapolate(world_point);
//         return;
      }         
   }
   #endif
   ossimEcefPoint g_ecf(world_point);
   ossimEcefVector ecfRayDir(g_ecf - m_adjEcefPlatformPosition);
   ossimColumnVector3d camRayDir (m_compositeMatrixInverse*ecfRayDir.data());
   
      
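    // Intersect the camera-frame ray with the focal plane at z = -m_focalLength
    // to get film-plane coordinates for this ground point.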
    double scale = -m_focalLength/camRayDir[2];
    ossimDpt film (scale*camRayDir[0], scale*camRayDir[1]);
    film.x /= m_pixelSize.x; // get into pixel coordinates
    film.y /= m_pixelSize.y;


    // now distort to find the true image coordinate location
    if (m_lensDistortion.valid())
    {
      ossimDpt filmOut;
      film.x /= m_norm; // normalize radial
      film.y /= m_norm;
       m_lensDistortion->distort(film, filmOut);
      film = filmOut;//+m_lensDistortion->getCenter();
      film.x *= m_norm;
      film.y *= m_norm; 
    }

    // invert Y to get back to left handed image space
    ossimDpt f1(film.x+m_calibratedCenter.x, m_calibratedCenter.y-film.y);
    
    image_point = f1;
}
tmp<volScalarField> primaryRadiation::Shs()
{
    tmp<volScalarField> tShs
    (
        volScalarField::New
        (
            typeName + ":Shs",
            film().regionMesh(),
            dimensionedScalar(dimMass/pow3(dimTime), 0)
        )
    );

    scalarField& Shs = tShs.ref();
    const scalarField& qinP = qinPrimary_;
    const scalarField& alpha = filmModel_.alpha();

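    // Scale the incident radiative flux from the primary region by the local film coverage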
    Shs = qinP*alpha;

    return tShs;
}
Example #9
int main()
{

  Film film(401, 401);

  ngl::Vec3 pos(0, 0, 0);
  ngl::Vec3 lookAt(0, 0, 100);
  ngl::Vec3  up(0, 1, 0);



  Camera cam(pos, lookAt, up, 90.0, &film);

  Ray newRay(ngl::Vec3(0, 0, 0), ngl::Vec3(0, 0, 1));



  //cam.generateRay(200, 200, &newRay);
  std::cout << newRay.m_origin << newRay.m_direction << newRay.m_invDirection << std::endl;


  IsectData intersection;

  //film.show();

  //std::thread task(&Film::show, &film);

  auto mesh = Meshes::scene1();
  auto green = std::make_shared<Material>();
  green->m_diffuseColour = SDL_Color{0, 255, 0, 255};

  std::shared_ptr<Primative> scenePrim = std::make_shared<GeometricPrim>(mesh, green);

  Renderer new_renderer(&cam, &film, scenePrim);

  new_renderer.renderImage();
  //task.join();

  film.show();
  return EXIT_SUCCESS;
}
Example #10
void ossimBuckeyeSensor::worldToLineSample(const ossimGpt& world_point,
	ossimDpt&       image_point) const
{
	if (traceDebug())  ossimNotify(ossimNotifyLevel_DEBUG) << "DEBUG ossimBuckeyeSensor::worldToLineSample: entering..." << std::endl;
	if((theBoundGndPolygon.getNumberOfVertices() > 0)&&
		(!theBoundGndPolygon.hasNans()))
	{
		if (!(theBoundGndPolygon.pointWithin(world_point)))
		{
			image_point.makeNan();
			return;
		}         
	}
	ossimEcefPoint g_ecf(world_point);
	ossimEcefVector ecfRayDir(g_ecf - theAdjEcefPlatformPosition);
	ossimColumnVector3d camRayDir (theCompositeMatrixInverse*ecfRayDir.data());


	double scale = -theFocalLength/camRayDir[2];
	ossimDpt film (scale*camRayDir[0], scale*camRayDir[1]);

	if (theLensDistortion.valid())
	{
		ossimDpt filmOut;
		theLensDistortion->distort(film, filmOut);
		film = filmOut;
	}

	ossimDpt f1(film + thePrincipalPoint);
	ossimDpt p1(f1.x/thePixelSize.x,
		-f1.y/thePixelSize.y);

	ossimDpt p0 (p1.x + theRefImgPt.x,
		p1.y + theRefImgPt.y);

	image_point = p0;
	if (traceDebug())  ossimNotify(ossimNotifyLevel_DEBUG) << "DEBUG ossimBuckeyeSensor::worldToLineSample: returning..." << std::endl;
}
void rspfBuckeyeSensor::worldToLineSample(const rspfGpt& world_point,
                                          rspfDpt&       image_point) const
{
#if 0
   if((theBoundGndPolygon.getNumberOfVertices() > 0)&&
      (!theBoundGndPolygon.hasNans()))
   {
      if (!(theBoundGndPolygon.pointWithin(world_point)))
      {
         image_point.makeNan();
         return;
      }         
   }
#endif
   rspfEcefPoint g_ecf(world_point);
   rspfEcefVector ecfRayDir(g_ecf - m_ecefPlatformPosition);
   rspfColumnVector3d camRayDir (m_compositeMatrixInverse*ecfRayDir.data());
   
   
   double scale = -m_focalLength/camRayDir[2];
   rspfDpt film (scale*camRayDir[0], scale*camRayDir[1]);
   
   if (m_lensDistortion.valid())
   {
      rspfDpt filmOut;
      m_lensDistortion->distort(film, filmOut);
      film = filmOut;
   }
   
   rspfDpt f1(film + m_principalPoint);
   rspfDpt p1(f1.x/m_pixelSize.x,
               -f1.y/m_pixelSize.y);
   
   rspfDpt p0 (p1.x + theRefImgPt.x,
                p1.y + theRefImgPt.y);
   
   image_point = p0;
}
void rspfApplanixEcefModel::imagingRay(const rspfDpt& image_point,
                                    rspfEcefRay&   image_ray) const
{
//    if(traceDebug())
//    {
//       rspfNotify(rspfNotifyLevel_DEBUG) << "rspfApplanixEcefModel::imagingRay: ..... entered" << std::endl;
//    }
    rspfDpt f1 ((image_point) - theRefImgPt);
   f1.x *= thePixelSize.x;
   f1.y *= -thePixelSize.y;
   rspfDpt film (f1 - thePrincipalPoint);
//    if(traceDebug())
//    {
//       rspfNotify(rspfNotifyLevel_DEBUG) << "pixel size   = " << thePixelSize << std::endl;
//       rspfNotify(rspfNotifyLevel_DEBUG) << "principal pt = " << thePrincipalPoint << std::endl;
//       rspfNotify(rspfNotifyLevel_DEBUG) << "film pt      = " << film << std::endl;
//    }
   if (theLensDistortion.valid())
   {
      rspfDpt filmOut;
      theLensDistortion->undistort(film, filmOut);
      film = filmOut;
   }
   
   rspfColumnVector3d cam_ray_dir (film.x,
                                    film.y,
                                    -theFocalLength);
   rspfEcefVector     ecf_ray_dir (theCompositeMatrix*cam_ray_dir);
   ecf_ray_dir = ecf_ray_dir*(1.0/ecf_ray_dir.magnitude());
  
   image_ray.setOrigin(theAdjEcefPlatformPosition);
   image_ray.setDirection(ecf_ray_dir);

//    if(traceDebug())
//    {
//       rspfNotify(rspfNotifyLevel_DEBUG) << "rspfApplanixEcefModel::imagingRay: ..... leaving" << std::endl;
//    }
}
void rspfBuckeyeSensor::imagingRay(const rspfDpt& image_point,
                                   rspfEcefRay&   image_ray) const
{
   rspfDpt f1 ((image_point) - theRefImgPt);
   f1.x *= m_pixelSize.x;
   f1.y *= -m_pixelSize.y;
   rspfDpt film (f1 - m_principalPoint);
   if (m_lensDistortion.valid())
   {
      rspfDpt filmOut;
      m_lensDistortion->undistort(film, filmOut);
      film = filmOut;
   }
   
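   // Camera-frame ray direction; parameter offset 6 is an adjustable correction to the focal length.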
   rspfColumnVector3d cam_ray_dir (film.x,
                                    film.y,
                                    -(m_focalLength+computeParameterOffset(6)));
   rspfEcefVector     ecf_ray_dir (m_compositeMatrix*cam_ray_dir);
   ecf_ray_dir = ecf_ray_dir*(1.0/ecf_ray_dir.magnitude());
   
   image_ray.setOrigin(m_ecefPlatformPosition);
   image_ray.setDirection(ecf_ray_dir);
}
Example #14
void ossimApplanixUtmModel::worldToLineSample(const ossimGpt& world_point,
                                           ossimDpt&       image_point) const
{
   if((theBoundGndPolygon.getNumberOfVertices() > 0)&&
      (!theBoundGndPolygon.hasNans()))
   {
      if (!(theBoundGndPolygon.pointWithin(world_point)))
      {
         image_point.makeNan();
//          image_point = extrapolate(world_point);
         return;
      }         
   }
   ossimEcefPoint g_ecf(world_point);
   ossimEcefVector ecfRayDir(g_ecf - theAdjEcefPlatformPosition);
   ossimColumnVector3d camRayDir (theCompositeMatrixInverse*ecfRayDir.data());
   
      
   double scale = -theFocalLength/camRayDir[2];
   ossimDpt film (scale*camRayDir[0], scale*camRayDir[1]);
      
   if (theLensDistortion.valid())
   {
      ossimDpt filmOut;
      theLensDistortion->distort(film, filmOut);
      film = filmOut;
   }

   ossimDpt f1(film + thePrincipalPoint);
   ossimDpt p1(f1.x/thePixelSize.x,
               -f1.y/thePixelSize.y);

   ossimDpt p0 (p1.x + theRefImgPt.x,
                p1.y + theRefImgPt.y);

   image_point = p0;
}
Example #15
int Filmotheque::charger(string p_fichier)
{
	ifstream fichier(p_fichier.c_str(), ios::in);  // open the file for reading
	
	if(fichier)  // if the file was opened successfully
	{       
		string ligne;
		int compteur = 0;
		
		string m_titre;
		bool m_dvd = false;
		string m_fichier = "";
		string m_realisateur = "";
		string m_annee = "";
		string m_resume = "";
		
        while ( getline( fichier, ligne ) )
        {
			switch (compteur)
			{
				case 0:
					m_titre = ligne;
					cout << ligne << endl;
					break;
				case 1:
					if (ligne == "dvd") m_dvd = true;
					else 
					{
						m_dvd = false;
						m_fichier = ligne;
					}
					cout << ligne << endl;
					break;
 				case 2:
 					m_realisateur = ligne;
					cout << ligne << endl;
 					break;
				case 3:
					m_annee = ligne;
					cout << ligne << endl;
					break;
				case 4:
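					// The summary spans multiple lines and ends at a line containing "**"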
					while (ligne != "**")
					{
						m_resume += ligne;
						getline( fichier, ligne );
						if (ligne != "**") m_resume += "\n";
					}
					Film film(m_titre, m_dvd, m_fichier, m_realisateur, m_annee, m_resume);
					m_filmotheque.push_back(film);
					m_resume = "";
					compteur = -1;
					cout << ligne << endl;
					break;
			}
			compteur ++;
        }

		fichier.close();  // close the file
		return 0;
	}
	else  // otherwise
	{
		cerr << "Impossible d'ouvrir le fichier !" << endl;
		return -1;
	}
}
Example #16
int 
main (int argc, char **argv)
{
        extern char *optarg;
        extern int optind, opterr, optopt;


        film f = film ();

        // Initialize threshold to a sensible default value
        f.threshold=60;
        
        for (;;)
        {
                int c = getopt (argc, argv, "?ht:i:o:s:flwvmr");

                if (c < 0)
                {
                        break;
                }

                switch (c)
                {
                        case '?':
                        case 'h':
                                show_help (argv);
                                exit (EXIT_SUCCESS);
                                break;

                                /* thumbnail selection */
                        case 'f':
                                f.set_first_img(true);
                                break;

                        case 'l':
                                f.set_last_img(true);
                                break;

                                /* generate the image at native resolution */
                        case 'r':
                                f.set_shot(true);
                                break;

                                /* generate the thumbnail image */
                        case 'm':
                                f.set_thumb(true);
                                break;

                                /* generate the XML for the video data */
                        case 'v':
                                f.set_video(true);
                                break;

                                /* generate the XML for the audio data */
                        case 'w':
                                f.set_audio(true) ;
                                break;

                                /*  Threshold  */
                        case 's':
                                f.set_threshold(atoi (optarg));
                                break;

                        case 'i':
                                f.set_ipath(optarg);
                                break;

                        case 'o':
                                f.set_opath(optarg);
                                break;

                        default:
                                break;
                }

        }

        /*  Error handling  */
        if (f.get_ipath().empty())
        {
                cerr << "Please specify an input file" << endl;
                show_help (argv);
                exit(EXIT_FAILURE);
        }
        if (f.get_opath().empty())
        {
                cerr << "Please specify an output path" << endl;
                show_help (argv);
                exit(EXIT_FAILURE);
        }

        xml *x = new xml (&f);
        f.x = x;

        f.shotlog("Processing movie.");
        f.process ();
        string xml_path  = f.global_path;
        xml_path += "/result.xml";
        f.x->write_data (xml_path);
        string finished_path = f.global_path;
        finished_path += "/finished";
        // Write the "finished" sentinel file, guarding against a failed fopen
        FILE *fd_finished = fopen(finished_path.c_str(), "w");
        if (fd_finished)
        {
                fprintf(fd_finished, "0\n");
                fclose(fd_finished);
        }
        exit (0);
}
Example #17
static void
run (const gchar      *name,
     gint              nparams,
     const GimpParam  *param,
     gint             *nreturn_vals,
     GimpParam       **return_vals)
{
  static GimpParam  values[2];
  GimpPDBStatusType status = GIMP_PDB_SUCCESS;
  gint32            image_ID;
  gint              k;

  INIT_I18N ();

  run_mode = param[0].data.d_int32;

  *nreturn_vals = 2;
  *return_vals  = values;

  values[0].type          = GIMP_PDB_STATUS;
  values[0].data.d_status = status;
  values[1].type          = GIMP_PDB_IMAGE;
  values[1].data.d_int32  = -1;

  switch (run_mode)
    {
    case GIMP_RUN_INTERACTIVE:
      /*  Possibly retrieve data  */
      gimp_get_data (PLUG_IN_PROC, &filmvals);

      /*  First acquire information with a dialog  */
      if (! film_dialog (param[1].data.d_int32))
        return;
      break;

    case GIMP_RUN_NONINTERACTIVE:
      /*  Make sure all the arguments are there!  */
      /* Also we want to have some images to compose */
      if ((nparams != 12) || (param[10].data.d_int32 < 1))
        {
          status = GIMP_PDB_CALLING_ERROR;
        }
      else
        {
          filmvals.keep_height       = (param[3].data.d_int32 <= 0);
          filmvals.film_height       = (filmvals.keep_height ?
                                        128 : param[3].data.d_int32);
          filmvals.film_color        = param[4].data.d_color;
          filmvals.number_start      = param[5].data.d_int32;
          g_strlcpy (filmvals.number_font, param[6].data.d_string, FONT_LEN);
          filmvals.number_color      = param[7].data.d_color;
          filmvals.number_pos[0]     = param[8].data.d_int32;
          filmvals.number_pos[1]     = param[9].data.d_int32;
          filmvals.num_images        = param[10].data.d_int32;
          if (filmvals.num_images > MAX_FILM_PICTURES)
            filmvals.num_images = MAX_FILM_PICTURES;
          for (k = 0; k < filmvals.num_images; k++)
            filmvals.image[k] = param[11].data.d_int32array[k];
        }
      break;

    case GIMP_RUN_WITH_LAST_VALS:
      /*  Possibly retrieve data  */
      gimp_get_data (PLUG_IN_PROC, &filmvals);
      break;

    default:
      break;
    }

  if (! check_filmvals ())
    status = GIMP_PDB_CALLING_ERROR;

  if (status == GIMP_PDB_SUCCESS)
    {
      gimp_progress_init (_("Composing images"));

      image_ID = film ();

      if (image_ID < 0)
        {
          status = GIMP_PDB_EXECUTION_ERROR;
        }
      else
        {
          values[1].data.d_int32 = image_ID;
          gimp_image_undo_enable (image_ID);
          gimp_image_clean_all (image_ID);
          if (run_mode != GIMP_RUN_NONINTERACTIVE)
            gimp_display_new (image_ID);
        }

      /*  Store data  */
      if (run_mode == GIMP_RUN_INTERACTIVE)
        gimp_set_data (PLUG_IN_PROC, &filmvals, sizeof (FilmVals));
    }

  values[0].data.d_status = status;
}
SearchEngine::SearchEngine()
{
	//read book records from book.txt and take in 10 pipe separated fields 
	ifstream book("book.txt");
      while(!getline(book, callNumber,'|').eof())
      {
        getline(book, title,'|');
        getline(book, subjects,'|');
        getline(book, author,'|');
        getline(book, description,'|');
        getline(book, publisher,'|');
        getline(book, city,'|');
        getline(book, year,'|');
        getline(book, series,'|');
        getline(book, notes, '\n');
            Book* objBook = new Book(callNumber, title, subjects, author, description, publisher, city, year, series, notes);
            CardCatalog.push_back(objBook);
      }
      book.close();
      
      //read periodic records from periodic.txt and take in 12 pipe separated fields
      ifstream periodic("periodic.txt");
      while(!getline(periodic, periodic_callNumber,'|').eof())
      {
        getline(periodic, periodic_title,'|');
        getline(periodic, periodic_subjects,'|');
        getline(periodic, periodic_author,'|');
        getline(periodic, periodic_description,'|');
        getline(periodic, periodic_publisher,'|');
        getline(periodic, periodic_publishing_history,'|');
        getline(periodic, periodic_series,'|');
        getline(periodic, periodic_notes,'|');
        getline(periodic, periodic_related_titles,'|');
        getline(periodic, periodic_other_forms_of_title,'|');
        getline(periodic, periodic_govt_doc_number, '\n');
            Periodic* objPeriodic = new Periodic(periodic_callNumber, periodic_title, periodic_subjects, periodic_author, 
            periodic_description, periodic_publisher, periodic_publishing_history, periodic_series, periodic_notes, 
            periodic_related_titles, periodic_other_forms_of_title, periodic_govt_doc_number);
            CardCatalog.push_back(objPeriodic);

      }
      periodic.close();
      
      //read video records from video.txt and take in 8 pipe separated fields
      ifstream video("video.txt");
      while(!getline(video, video_callNumber,'|').eof())
      {
        getline(video, video_title,'|');
        getline(video, video_subjects,'|');
        getline(video, video_description,'|');
        getline(video, video_distributor,'|');
        getline(video, video_notes,'|');
        getline(video, video_series,'|');
        getline(video, video_label, '\n');
            Video* objVideo = new Video(video_callNumber, video_title, video_subjects, video_description, 
            video_distributor, video_notes, video_series, video_label);
            CardCatalog.push_back(objVideo);
	
      }
      video.close();
      
      //read film records from film.txt and take in 6 pipe separated fields
      ifstream film("film.txt");
      while(!getline(film, film_callNumber,'|').eof())
      {
        getline(film, film_title,'|');
        getline(film, film_subjects,'|');
        getline(film, film_director,'|');
        getline(film, film_notes,'|');
        getline(film, film_year, '\n');
            Film* objFilm = new Film(film_callNumber, film_title, film_subjects, film_director, film_notes, film_year);
            CardCatalog.push_back(objFilm);

      }
      film.close();
}