void SAMAV1ISO7816Commands::changeKUCEntry(unsigned char kucno, std::shared_ptr<SAMKucEntry> kucentry, std::shared_ptr<DESFireKey> key) { if (d_crypto->d_sessionKey.size() == 0) THROW_EXCEPTION_WITH_LOG(LibLogicalAccessException, "Failed: AuthentificationHost have to be done before use such command."); unsigned char data[6] = {}; memcpy(data, &kucentry->getKucEntryStruct(), 6); std::vector<unsigned char> vectordata(data, data + 6); std::vector<unsigned char> encdatalittle; if (key->getKeyType() == DF_KEY_DES) encdatalittle = d_crypto->sam_encrypt(d_crypto->d_sessionKey, vectordata); else encdatalittle = d_crypto->sam_crc_encrypt(d_crypto->d_sessionKey, vectordata, key); unsigned char proMas = kucentry->getUpdateMask(); unsigned char cmd[] = { d_cla, 0xcc, kucno, proMas, 0x08 }; std::vector<unsigned char> cmd_vector(cmd, cmd + 5), result; cmd_vector.insert(cmd_vector.end(), encdatalittle.begin(), encdatalittle.end()); result = transmit(cmd_vector); if (result.size() >= 2 && (result[result.size() - 2] != 0x90 || result[result.size() - 1] != 0x00)) THROW_EXCEPTION_WITH_LOG(LibLogicalAccessException, "changeKUCEntry failed."); }
void SAMAV1ISO7816Commands::changeKeyEntry(unsigned char keyno, std::shared_ptr<SAMKeyEntry<KeyEntryAV1Information, SETAV1> > keyentry, std::shared_ptr<DESFireKey> key) { if (d_crypto->d_sessionKey.size() == 0) THROW_EXCEPTION_WITH_LOG(LibLogicalAccessException, "Failed: AuthentificationHost have to be done before use such command."); unsigned char proMas = keyentry->getUpdateMask(); size_t buffer_size = keyentry->getLength() + sizeof(KeyEntryAV1Information); unsigned char *data = new unsigned char[buffer_size](); memcpy(data, &*(keyentry->getData()), keyentry->getLength()); memcpy(data + 48, &keyentry->getKeyEntryInformation(), sizeof(KeyEntryAV1Information)); std::vector<unsigned char> iv; iv.resize(16, 0x00); std::vector<unsigned char> vectordata(data, data + buffer_size); delete[] data; std::vector<unsigned char> encdatalittle; if (key->getKeyType() == DF_KEY_DES) encdatalittle = d_crypto->sam_encrypt(d_crypto->d_sessionKey, vectordata); else encdatalittle = d_crypto->sam_crc_encrypt(d_crypto->d_sessionKey, vectordata, key); unsigned char cmd[] = { d_cla, 0xc1, keyno, proMas, (unsigned char)(encdatalittle.size()) }; std::vector<unsigned char> cmd_vector(cmd, cmd + 5), result; cmd_vector.insert(cmd_vector.end(), encdatalittle.begin(), encdatalittle.end()); result = transmit(cmd_vector); if (result.size() >= 2 && (result[result.size() - 2] != 0x90 || result[result.size() - 1] != 0x00)) THROW_EXCEPTION_WITH_LOG(LibLogicalAccessException, "changeKeyEntry failed."); }
/******************************************************************************
 Description.: this thread worker grabs a frame and copies it to the global buffer
 Input Value.: unused
 Return Value: unused, always NULL
 ******************************************************************************/
void *cam_thread(void *arg)
{
    // Load runtime settings and apply the configured camera exposure before
    // entering the capture loop.
    g_settings.init();
    setCameraExposure();

    CVideoFrame* pFrame = NULL;

#ifndef TEST_USE_JPEGS_NOT_CAMERA
    int width = VIEW_PIXEL_X_WIDTH;
    int height = VIEW_PIXEL_Y_HEIGHT;
    // NOTE(review): 'img' is never used nor released below — looks like dead
    // code and a one-off leak; confirm before removing.
    IplImage * img = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 3); // OpenCV image (translated from Polish "obraz OpenCV")
#endif

    frameGrinder.init();

#ifdef TEST_USE_JPEGS_NOT_CAMERA
    // Test mode: no camera. Two JPEG files are loaded once up front and then
    // alternated inside the loop to simulate a frame stream.
    std::string sBasePath = "/home/";
    sBasePath += HOME_NAME;
    std::string sPath = sBasePath;
    sPath += "/0243-20150125-22-21-46.jpg";
    //sPath += "/0007-20150125-22-36-25.jpg";
    cv::Mat frame1 = cv::imread(sPath.c_str(), CV_LOAD_IMAGE_COLOR);
    if (frame1.empty())
    {
        dbgMsg_s("Failed to read image data from a file1\n");
    }
    sPath = sBasePath;
    // NOTE(review): same filename as frame1, so both test frames are
    // identical; presumably the commented-out path was the second image.
    sPath += "/0243-20150125-22-21-46.jpg";
    //sPath += "/0007-20150125-22-36-25.jpg";
    cv::Mat frame2 = cv::imread(sPath.c_str(), CV_LOAD_IMAGE_COLOR);
    if (frame2.empty())
    {
        dbgMsg_s("Failed to read image data from a file2\n");
    }
    bool toggle = false;
#endif

    // The plugin context is handed in through the generic thread argument.
    context *pcontext = (context*) arg;
    pglobal = pcontext->pglobal;

    /* set cleanup handler to cleanup allocated ressources */
    pthread_cleanup_push(cam_cleanup, pcontext);

    while (!pglobal->stop) {
        // Busy-wait (1 us sleep) while streaming is paused.
        while (pcontext->videoIn->streamingState == STREAMING_PAUSED) {
            usleep(1); // maybe not the best way so FIXME
        }

#ifdef TEST_USE_JPEGS_NOT_CAMERA
        // Test mode: hand the next alternating test image to the frame queue.
        if (frameGrinder.safeGetFreeFrame(&pFrame)) {
            if (toggle) {
                pFrame->m_frame = frame1;
            } else {
                pFrame->m_frame = frame2;
            }
            toggle = (!toggle);
            if (!pFrame->m_frame.empty()) {
                frameGrinder.safeAddTail(pFrame, CVideoFrame::FRAME_QUEUE_WAIT_FOR_BLOB_DETECT);
            } else {
                // Empty frame: recycle it straight back to the free queue.
                dbgMsg_s("Frame is empty\n");
                frameGrinder.safeAddTail(pFrame, CVideoFrame::FRAME_QUEUE_FREE);
            }
            frameGrinder.m_testMonitor.m_nTasksDone[CTestMonitor::TASK_DONE_CAMERA]++;
        }
#else
        /* grab a frame */
        if (uvcGrab(pcontext->videoIn) < 0) {
            IPRINT("Error grabbing frames\n");
            exit(EXIT_FAILURE);
        }

        DBG("received frame of size: %d from plugin: %d\n", pcontext->videoIn->buf.bytesused, pcontext->id);

        /*
         * Workaround for broken, corrupted frames:
         * Under low light conditions corrupted frames may get captured.
         * The good thing is such frames are quite small compared to the regular pictures.
         * For example a VGA (640x480) webcam picture is normally >= 8kByte large,
         * corrupted frames are smaller.
         */
        if (pcontext->videoIn->buf.bytesused < minimum_size) {
            DBG("dropping too small frame, assuming it as broken\n");
            continue;
        }

        // Re-read the exposure setting from file if dynamic settings are on,
        // and push it to the camera only when the value actually changed.
        if (g_settings.isDynamicSettingsEnabled())
        {
            g_settings.getValueFromFile(CSetting::SETTING_EXPOSURE);
        }
        if(g_settings.isValueChanged(CSetting::SETTING_EXPOSURE))
        {
            setCameraExposure();
        }

#ifdef NO_CV_JUST_STREAM_THE_CAMERA
        /* copy JPG picture to global buffer */
        pthread_mutex_lock(&pglobal->in[pcontext->id].db);

        /*
         * If capturing in YUV mode convert to JPEG now.
         * This compression requires many CPU cycles, so try to avoid YUV format.
         * Getting JPEGs straight from the webcam, is one of the major advantages of
         * Linux-UVC compatible devices.
         */
        if (pcontext->videoIn->formatIn == V4L2_PIX_FMT_YUYV) {
            DBG("compressing frame from input: %d\n", (int) pcontext->id);
            pglobal->in[pcontext->id].size = compress_yuyv_to_jpeg(pcontext->videoIn, pglobal->in[pcontext->id].buf, pcontext->videoIn->framesizeIn, gquality);
        } else {
            DBG("copying frame from input: %d\n", (int) pcontext->id);
            pglobal->in[pcontext->id].size = memcpy_picture(pglobal->in[pcontext->id].buf, pcontext->videoIn->tmpbuffer, pcontext->videoIn->buf.bytesused);
        }

        /* copy this frame's timestamp to user space */
        pglobal->in[pcontext->id].timestamp = pcontext->videoIn->buf.timestamp;

        /* signal fresh_frame */
        pthread_cond_broadcast(&pglobal->in[pcontext->id].db_update);
        pthread_mutex_unlock(&pglobal->in[pcontext->id].db);

#else // #ifndef NO_CV_JUST_STREAM_THE_CAMERA
        // OpenCV path: decode the grabbed buffer into a cv::Mat and queue it
        // for blob detection.
        if (frameGrinder.safeGetFreeFrame(&pFrame)) {
            // NOTE(review): 'height * width' bytes are copied from tmpbuffer,
            // but the grabbed data is a JPEG whose length is
            // buf.bytesused — presumably height*width is just a safe upper
            // bound for the compressed stream; confirm against uvcGrab.
            std::vector<uchar> vectordata(pcontext->videoIn->tmpbuffer, pcontext->videoIn->tmpbuffer + (height * width));
            cv::Mat data_mat(vectordata, false);
            cv::Mat image(cv::imdecode(data_mat, 1)); //put 0 if you want greyscale
            pFrame->m_frame = image;
            if (!pFrame->m_frame.empty()) {
                frameGrinder.safeAddTail(pFrame, CVideoFrame::FRAME_QUEUE_WAIT_FOR_BLOB_DETECT);
            } else {
                // Decode failed: recycle the frame to the free queue.
                dbgMsg_s("Frame is empty\n");
                frameGrinder.safeAddTail(pFrame, CVideoFrame::FRAME_QUEUE_FREE);
            }
            frameGrinder.m_testMonitor.m_nTasksDone[CTestMonitor::TASK_DONE_CAMERA]++;
        }
#endif // #ifndef NO_CV_JUST_STREAM_THE_CAMERA
#endif // TEST_USE_JPEGS_NOT_CAMERA
    }

    DBG("leaving input thread, calling cleanup function now\n");
    // Pop with execute=1: cam_cleanup(pcontext) runs here on normal exit
    // (and on cancellation via the cleanup handler).
    pthread_cleanup_pop(1);

    return NULL;
}