Example #1
void ConfigFile::parseFile(const char *filename)
{
    std::ifstream file;
    char buf[MAX_CHARS_PER_LINE];

    m_fileIsParsed = true;
    m_valueMap.clear();

    file.open(filename);
    if (!file.good())
        return;

    // read lines from the file and form option, value pairs;
    // looping on getline() itself avoids the !eof() pitfall and stops on any read error
    while (file.getline(buf, MAX_CHARS_PER_LINE))
    {
        char option[512];
        char value[512];

        char *pComment;

        //discard any comments delimited by '#' in the line
        pComment = strchr(buf, '#');
        if (pComment)
            *pComment = '\0';

        if (sscanf(buf, " %511[^\n\t =] = %511[^\n \t]", option, value) == 2)
        {
            std::string optStr(option);
            std::string valStr(value);
            m_valueMap[optStr] = valStr;
        }
    }
}
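The sscanf() pattern above accepts lines of the form "option = value", with optional surrounding whitespace and an optional trailing '#' comment. Below is a minimal, self-contained sketch of the same parsing step; the sample lines and option names are illustrative only, not taken from a real config file.

#include <cstdio>
#include <cstring>
#include <iostream>
#include <map>
#include <string>

int main()
{
    // illustrative input lines, as they might appear in a config file
    const char *lines[] = {
        "samplerate = 16000   # sensor sample rate",
        "   logfile=run.log",
        "# a pure comment line"
    };

    std::map<std::string, std::string> valueMap;
    for (const char *line : lines) {
        char buf[512];
        std::strncpy(buf, line, sizeof(buf) - 1);
        buf[sizeof(buf) - 1] = '\0';

        // discard any comment delimited by '#', exactly as parseFile() does
        if (char *pComment = std::strchr(buf, '#'))
            *pComment = '\0';

        char option[512], value[512];
        if (std::sscanf(buf, " %511[^\n\t =] = %511[^\n \t]", option, value) == 2)
            valueMap[option] = value;
    }

    for (const auto &kv : valueMap)
        std::cout << kv.first << " -> " << kv.second << "\n";
    return 0;
}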
Example #2
void cSemaineEmmaSender::sendSingleEmotionDim( cComponentMessage *_msg, const char * set, std::string dim )
{
  char strtmp[50];
  sprintf(strtmp,"%.2f",_msg->floatData[0]);
  std::string valStr(strtmp);

  sprintf(strtmp,"%ld",smileTimeToSemaineTime(_msg->userTime1));
  std::string startTm(strtmp);
  sprintf(strtmp,"%ld",(long long)round((_msg->userTime2 - _msg->userTime1)*1000.0));
  std::string duration(strtmp);

  // Create and fill a simple EMMA EmotionML document
  XERCESC_NS::DOMDocument * document = XMLTool::newDocument(EMMA::E_EMMA, EMMA::namespaceURI, EMMA::version);
  XERCESC_NS::DOMElement * interpretation = XMLTool::appendChildElement(document->getDocumentElement(), EMMA::E_INTERPRETATION);
  XMLTool::setAttribute(interpretation, EMMA::A_OFFSET_TO_START, startTm);
  XMLTool::setAttribute(interpretation, EMMA::A_DURATION, duration);
  XMLTool::setPrefix(interpretation, "emma");

  XERCESC_NS::DOMElement * emotion = XMLTool::appendChildElement(interpretation, EmotionML::E_EMOTION, EmotionML::namespaceURI);
  XMLTool::setPrefix(emotion, "emotion");

  XERCESC_NS::DOMElement * dimensions = XMLTool::appendChildElement(emotion, EmotionML::E_DIMENSIONS, EmotionML::namespaceURI);
  XMLTool::setAttribute(dimensions, EmotionML::A_SET, set);
  XMLTool::setPrefix(dimensions, "emotion");

  XERCESC_NS::DOMElement * dimension = XMLTool::appendChildElement(dimensions, dim, EmotionML::namespaceURI);
  XMLTool::setAttribute(dimension, EmotionML::A_VALUE, valStr);
  XMLTool::setPrefix(dimension, "emotion");

  // Now send it
  sendDocument(document);
}
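The sprintf() calls above turn a float and two long long timestamps into the strings that become EMMA attributes; the format string has to match the argument type exactly (hence "%lld" for the long long values). With C++11 the same conversions can be written without any format/type pairing at all. A sketch with stand-in values follows; the variable names mirror the ones above, and smileTimeToSemaineTime() is replaced by a constant.

#include <cmath>
#include <cstdio>
#include <string>

int main()
{
    // stand-ins for the cComponentMessage fields used above
    double floatData0 = 0.42;
    double userTime1 = 12.0, userTime2 = 12.75;
    long long semaineTime = 12345;  // what smileTimeToSemaineTime(userTime1) would return

    char strtmp[50];
    std::snprintf(strtmp, sizeof(strtmp), "%.2f", floatData0);
    std::string valStr(strtmp);

    std::string startTm = std::to_string(semaineTime);
    std::string duration =
        std::to_string((long long)std::llround((userTime2 - userTime1) * 1000.0));

    std::printf("value=%s start=%s duration=%s\n",
                valStr.c_str(), startTm.c_str(), duration.c_str());
    return 0;
}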
Example #3
void cSemaineWordSender::sendKeywords( cComponentMessage *_msg )
{
  int i;
  juliusResult *k = (juliusResult *)(_msg->custData);
  if (k==NULL) return;

  int nW = 0;
  for (i=0; i<k->numW; i++) {
    // check for non-verbals (marked with a leading '*') and skip them; only proceed if words are left
    if (k->word[i][0] != '*') nW++;
  }
  if (nW == 0) return;

  char strtmp[150];
  sprintf(strtmp,"%.2f",_msg->floatData[0]);
  std::string valStr(strtmp);
  long long startTime = smileTimeToSemaineTime(_msg->userTime1);
  sprintf(strtmp,"%ld",startTime);
  std::string startTm(strtmp);
  sprintf(strtmp,"%ld",(long long)round((_msg->userTime2 - _msg->userTime1)*1000.0));
  std::string duration(strtmp);

  // Create and fill a simple EMMA document
  XERCESC_NS::DOMDocument * document = XMLTool::newDocument(EMMA::E_EMMA, EMMA::namespaceURI, EMMA::version);
  XMLTool::setPrefix(document->getDocumentElement(), "emma");

  XERCESC_NS::DOMElement * sequence = XMLTool::appendChildElement(document->getDocumentElement(), EMMA::E_SEQUENCE);
  XMLTool::setAttribute(sequence, EMMA::A_OFFSET_TO_START, startTm);
  XMLTool::setAttribute(sequence, EMMA::A_DURATION, duration);
  XMLTool::setPrefix(sequence, "emma");

  for (i=0; i<k->numW; i++) {

    // split combined keywords (TALK_TO_POPPY) etc. at the special character "_" and put them in individual tags
    /*
    char * tr = strdup(k->word[i]);
    char * tmp = tr;
    char * x = NULL;
    do {
      x = (char *)strchr(tmp, '_');
      // separate at '_'
      if (x!=NULL) {
        *x = 0;
      }

      // remove spaces
      //while (*tmp==' ') { tmp++; }
      //size_t l = strlen(tmp);
      //while (tmp[l-1] == ' ') { tmp[l-1] = 0; l--; }

      // append an xml keyword tag
      XERCESC_NS::DOMElement * interpretation = XMLTool::appendChildElement(sequence, EMMA::E_INTERPRETATION);
      sprintf(strtmp,"%ld",startTime + (long long)round((k->start[i])*1000.0));
      std::string offs(strtmp);
      sprintf(strtmp,"%s",tmp);
      std::string keyword(strtmp);
      sprintf(strtmp,"%.3f",k->conf[i]);
      std::string confidence(strtmp);
      XMLTool::setAttribute(interpretation, EMMA::A_OFFSET_TO_START, offs);
      XMLTool::setAttribute(interpretation, EMMA::A_TOKENS, keyword);
      XMLTool::setAttribute(interpretation, EMMA::A_CONFIDENCE, confidence);
      XMLTool::setPrefix(interpretation, "emma");


      // analyse next part of string, if present
      if (x != NULL) {
        tmp = x+1;
      }
    } while (x !=  NULL);
    free(tr);


*/
    // one word:
    
    if (k->word[i][0] != '*') {

      XERCESC_NS::DOMElement * interpretation = XMLTool::appendChildElement(sequence, EMMA::E_INTERPRETATION);
      sprintf(strtmp,"%ld",startTime + (long long)round((k->start[i])*1000.0));
      std::string offs(strtmp);
      sprintf(strtmp,"%s",k->word[i]);
      std::string keyword(strtmp);
      sprintf(strtmp,"%.3f",k->conf[i]);
      std::string confidence(strtmp);
      XMLTool::setAttribute(interpretation, EMMA::A_OFFSET_TO_START, offs);
      XMLTool::setAttribute(interpretation, EMMA::A_TOKENS, keyword);
      XMLTool::setAttribute(interpretation, EMMA::A_CONFIDENCE, confidence);
      XMLTool::setPrefix(interpretation, "emma");

    }
  }


  // Now send it
  sendDocument(document);
}
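The filter above relies on Julius marking fillers and noises with a leading '*' in the word string; only the remaining words become emma:interpretation elements, each offset by its word start time relative to startTime. Below is a stand-alone sketch of that filtering and offset arithmetic; the struct is a simplified stand-in for juliusResult, whose real definition lives elsewhere in openSMILE.

#include <cmath>
#include <cstdio>
#include <string>
#include <vector>

// simplified stand-in for the juliusResult fields used above
struct MockResult {
    std::vector<std::string> word;   // recognized tokens, '*' marks non-verbals
    std::vector<double>      start;  // word start time in seconds
    std::vector<double>      conf;   // per-word confidence
};

int main()
{
    MockResult k{{"*breath*", "TALK_TO_POPPY", "HELLO"},
                 {0.00, 0.35, 1.20},
                 {0.00, 0.91, 0.87}};
    long long startTime = 5000;  // ms, standing in for smileTimeToSemaineTime(userTime1)

    for (size_t i = 0; i < k.word.size(); ++i) {
        if (k.word[i][0] == '*') continue;  // skip non-verbal entries
        long long offs = startTime + (long long)std::llround(k.start[i] * 1000.0);
        std::printf("keyword=%s offset-to-start=%lld confidence=%.3f\n",
                    k.word[i].c_str(), offs, k.conf[i]);
    }
    return 0;
}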
Example #4
void Blender::actionListenerCallback(const String &message)
{
    int mixer_id = 0;
    int channel_id = 0;
    int mixer_id_index = message.indexOfChar(':') + 1;
    int channel_id_index = message.indexOfChar('+') + 1;
    String idStr(String::empty);
    String type(message.upToFirstOccurrenceOf(":", false, false));
    String valStr(message.fromLastOccurrenceOf("=", false, false));
    uint32 value = valStr.getIntValue();
    bool bUpdateGui = false;

    if (mixer_id_index > 0)     // indexOfChar() returns -1 when ':' is missing, so the +1 result is 0
    {
        idStr = message.substring(mixer_id_index, mixer_id_index+2);
        mixer_id = idStr.getIntValue();

        if (channel_id_index > 0)   // same guard for the '+' separator
        {
            idStr = message.substring(channel_id_index, channel_id_index+2);
            channel_id = idStr.getIntValue();

            if (0 == type.compare("gain"))
            {
                int gain = jmin<int>(130, (int)value);
                m_bl_desc.mixer[mixer_id].channel[channel_id].gain = gain;
                m_bl_desc.mixer[mixer_id].channel[channel_id].gain_dbv = m_dBvMap[gain];
            }
            else if (0 == type.compare("master_gain"))
            {
                // master gain
                int gain = jmin<int>(130, (int)value);
                m_bl_desc.mixer[mixer_id].master.gain = gain;
                m_bl_desc.mixer[mixer_id].master.gain_dbv = m_dBvMap[gain];
            }
            else if (0 == type.compare("pan"))
            {
                // channel strip pan
                m_bl_desc.mixer[mixer_id].channel[channel_id].pan = (int)value;
            }
            else if (0 == type.compare("master_mute"))
            {
                // master out mute
                m_bl_desc.mixer[mixer_id].master.muting = (value != 0);
            }
            else if (0 == type.compare("mute"))
            {
                uint32 muting = (uint32)value;
                uint32 channel_bit = 1<<channel_id;

                // channel strip mute
                if (muting)
                {
                    // mute
                    m_bl_desc.mixer[mixer_id].mutes |= channel_bit;
                    m_bl_desc.mixer[mixer_id].was_muted |= channel_bit;

                    // handle solos
                    if (0 != m_bl_desc.mixer[mixer_id].solos)
                    {
                        if (m_bl_desc.mixer[mixer_id].solos == channel_bit)
                        {
                            // a solo was active only on that channel, unsolo all and restore mutes
                            m_bl_desc.mixer[mixer_id].solos = 0;
                            m_bl_desc.mixer[mixer_id].mutes = m_bl_desc.mixer[mixer_id].was_muted;
                            bUpdateGui = true;
                        }
                        // other solos are active, unsolo only this one if soloed
                        else if (m_bl_desc.mixer[mixer_id].solos & channel_bit)
                        {
                            m_bl_desc.mixer[mixer_id].solos ^= channel_bit;
                            m_bl_desc.mixer[mixer_id].mutes |= (m_bl_desc.mixer[mixer_id].was_muted & channel_bit);
                            bUpdateGui = true;
                        }
                    }
                }
                else
                {
                    // unmute
                    m_bl_desc.mixer[mixer_id].mutes ^= channel_bit;
                    m_bl_desc.mixer[mixer_id].was_muted ^= channel_bit;

                    // handle solos
                    if (0 != m_bl_desc.mixer[mixer_id].solos)
                    {
                        // a solo was active on another channel, solo this one too
                        m_bl_desc.mixer[mixer_id].solos |= channel_bit;
                        bUpdateGui = true;
                    }
                }
            }
            else if (0 == type.compare("solo"))
            {
                uint32 soloing = (uint32)value;
                uint32 channel_bit = 1<<channel_id;

                if (soloing)
                {
                    // is this the first soloed channel?
                    if (0 == m_bl_desc.mixer[mixer_id].solos)
                    {
                        // solo
                        m_bl_desc.mixer[mixer_id].solos |= channel_bit;

                        // mute all others
                        m_bl_desc.mixer[mixer_id].mutes = ~channel_bit;
                        bUpdateGui = true;
                    }
                    else
                    {
                        // solo this one also
                        m_bl_desc.mixer[mixer_id].solos |= channel_bit;
                        m_bl_desc.mixer[mixer_id].mutes ^= channel_bit;
                        bUpdateGui = true;
                    }
                }
                else
                {
                    // channel was soloed

                    // was this the only soloed channel?
                    if (m_bl_desc.mixer[mixer_id].solos == channel_bit)
                    {
                        // unsolo all
                        m_bl_desc.mixer[mixer_id].solos = 0;

                        // restore mute states
                        m_bl_desc.mixer[mixer_id].mutes = m_bl_desc.mixer[mixer_id].was_muted;
                        bUpdateGui = true;
                    }
                    else
                    {
                        // other channels are soloed, so unsolo this one only
                        m_bl_desc.mixer[mixer_id].solos ^= channel_bit;

                        // restore mute state
                        if (m_bl_desc.mixer[mixer_id].was_muted & channel_bit)
                        {
                            m_bl_desc.mixer[mixer_id].mutes |= channel_bit;
                        }
                        bUpdateGui = true;
                    }
                }

            }
            else if (0 == type.compare("dirout"))
            {
                m_bl_desc.mixer[mixer_id].direct_out = (0 != value);

                setDirectOut(mixer_id);
                sendDirout(mixer_id, m_bl_desc.mixer[mixer_id].direct_out);
                String msg(String::formatted("dirout: mixer id:%d, %d", mixer_id, m_bl_desc.mixer[mixer_id].direct_out));
                EventLogger::getInstance()->logMessage(msg);
            }
            else if (0 == type.compare("reset"))
            {
                for (int i=0; i<m_bl_desc.mixer[mixer_id].num_channels; i++)
                {
                    m_bl_desc.mixer[mixer_id].channel[i].gain = 101;
                    m_bl_desc.mixer[mixer_id].channel[i].gain_dbv = m_dBvMap[101];
                    sendChGain(mixer_id, i, 101);
                    m_bl_desc.mixer[mixer_id].channel[i].pan = 0;
                    sendChPan(mixer_id, i, 0);
                }
                m_bl_desc.mixer[mixer_id].mutes = 0;
                m_bl_desc.mixer[mixer_id].was_muted = 0;
                sendChMutes(mixer_id, m_bl_desc.mixer[mixer_id].mutes);
                m_bl_desc.mixer[mixer_id].solos = 0;
                sendChSolos(mixer_id, m_bl_desc.mixer[mixer_id].solos);

                m_bl_desc.mixer[mixer_id].master.gain = 101;
                m_bl_desc.mixer[mixer_id].master.gain_dbv = m_dBvMap[101];
                sendMasterGain(mixer_id, 101);
                m_bl_desc.mixer[mixer_id].master.muting = false;
                sendMasterMute(mixer_id, false);

                m_bl_desc.mixer[mixer_id].direct_out = 0;
                setDirectOut(mixer_id);
                sendDirout(mixer_id, 0);

                bUpdateGui = true;
            }
            // update mixer coeff's
            mixer_cfg(mixer_id);
        }

        if (bUpdateGui)
        {
            sendChMutes(mixer_id, m_bl_desc.mixer[mixer_id].mutes);
            sendChSolos(mixer_id, m_bl_desc.mixer[mixer_id].solos);
        }
    }
}
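The mute/solo handling above keeps three per-mixer bitmasks: mutes (current mute state), solos (currently soloed channels) and was_muted (the mute state saved so it can be restored once the last solo is released). Below is a reduced sketch of just that bookkeeping, using a stand-in struct instead of m_bl_desc.mixer[]; all names are illustrative.

#include <cstdint>
#include <cstdio>

// reduced stand-in for the per-mixer state used above
struct MixerState {
    uint32_t mutes = 0;      // bit n set -> channel n muted
    uint32_t solos = 0;      // bit n set -> channel n soloed
    uint32_t was_muted = 0;  // mute state saved before soloing began
};

// soloing a channel: the first solo mutes every other channel,
// further solos just unmute the newly soloed channel
void solo(MixerState &m, int channel_id)
{
    uint32_t bit = 1u << channel_id;
    if (m.solos == 0) {
        m.solos |= bit;
        m.mutes = ~bit;          // mute all others
    } else {
        m.solos |= bit;
        m.mutes ^= bit;          // unmute this channel as well
    }
}

// unsoloing: releasing the last solo restores the saved mute states,
// otherwise only this channel's solo/mute bits are updated
void unsolo(MixerState &m, int channel_id)
{
    uint32_t bit = 1u << channel_id;
    if (m.solos == bit) {
        m.solos = 0;
        m.mutes = m.was_muted;   // restore pre-solo mutes
    } else {
        m.solos ^= bit;
        if (m.was_muted & bit)
            m.mutes |= bit;
    }
}

int main()
{
    MixerState m;
    m.mutes = m.was_muted = 0x4;   // channel 2 was muted before any solo
    solo(m, 0);
    std::printf("after solo(0):   mutes=%#x solos=%#x\n", (unsigned)m.mutes, (unsigned)m.solos);
    unsolo(m, 0);
    std::printf("after unsolo(0): mutes=%#x solos=%#x\n", (unsigned)m.mutes, (unsigned)m.solos);
    return 0;
}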