bool libname::COggStreamSample::Load( const char *szFilename )
{
    vorbis_info *pInfo;

    if ( !m_pFile )
        m_pFile = fopen( szFilename, "rb" );

    if ( !m_pFile )
    {
        cerr << "Could not open file: " << szFilename << endl;
        return false;
    }

    if ( ov_open_callbacks( m_pFile, &m_OggFile, NULL, 0, OV_CALLBACKS_DEFAULT ) < 0 )
    {
        cerr << "Input does not appear to be an Ogg bitstream." << endl;
        return false;
    }

    pInfo = ov_info( &m_OggFile, -1 );

    SetNumChannels( pInfo->channels ); // number of channels
    SetFreq( pInfo->rate );            // sampling rate

    // Check the number of channels... always use 16-bit samples
    if ( GetNumChannels() == 1 )
        SetFormat( AL_FORMAT_MONO16 );
    else
        SetFormat( AL_FORMAT_STEREO16 );

    cerr << "Freq: " << GetFreq() << endl;
    cerr << "Channels: " << GetNumChannels() << endl;
    cerr << "Encoded: " << ov_comment( &m_OggFile, -1 )->vendor << endl;

    m_lFileSize = ov_raw_total( &m_OggFile, -1 );

    return true;
}
UnicodeString ICUSQLite3Utility::Format(
    const UDate& dateTime,
    const EIcuSqlite3DTStorageTypes type)
{
    if (!m_dtFormat) {
        return UNICODE_STRING_SIMPLE("");
    }

    if (ICUSQLITE_DATETIME_ISO8601 == type) {
        // Format as full date/time. See if we have milliseconds and
        // format accordingly.
        if (0 == (static_cast<int64_t>(dateTime) % 1000)) {
            SetFormat(DATE_FORMAT_ISO8601_DATETIME);
        }
        else {
            SetFormat(DATE_FORMAT_ISO8601_DATETIME_MILLISECONDS);
        }
    }
    else if (ICUSQLITE_DATETIME_ISO8601_TIME == type) {
        // Format as time only. See if we have milliseconds and
        // format accordingly.
        if (0 == (static_cast<int64_t>(dateTime) % 1000)) {
            SetFormat(DATE_FORMAT_ISO8601_TIME);
        }
        else {
            SetFormat(DATE_FORMAT_ISO8601_TIME_MILLISECONDS);
        }
    }
    else {
        SetFormat(static_cast<EIcuSqlite3FormatIndex>(type));
    }

    UnicodeString result;
    return m_dtFormat->format(dateTime, result);
}
// Format negotiation
HRESULT CTimeStretchFilter::NegotiateFormat(const WAVEFORMATEXTENSIBLE* pwfx, int nApplyChangesDepth, ChannelOrder* pChOrder)
{
  if (!pwfx)
    return VFW_E_TYPE_NOT_ACCEPTED;

#ifdef INTEGER_SAMPLES
  // only accept 16bit int
  if (pwfx->Format.wBitsPerSample != 16 || pwfx->SubFormat != KSDATAFORMAT_SUBTYPE_PCM)
    return VFW_E_TYPE_NOT_ACCEPTED;
#else
  // only accept 32bit float
  if (pwfx->Format.wBitsPerSample != 32 || pwfx->SubFormat != KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)
    return VFW_E_TYPE_NOT_ACCEPTED;
#endif

  if (FormatsEqual(pwfx, m_pInputFormat))
  {
    *pChOrder = m_chOrder;
    return S_OK;
  }

  bool bApplyChanges = (nApplyChangesDepth != 0);
  if (nApplyChangesDepth != INFINITE && nApplyChangesDepth > 0)
    nApplyChangesDepth--;

  // Let the downstream sink accept the format before applying it locally
  HRESULT hr = m_pNextSink->NegotiateFormat(pwfx, nApplyChangesDepth, pChOrder);
  if (FAILED(hr))
    return hr;

  hr = VFW_E_CANNOT_CONNECT;

  if (!pwfx)
    return SetFormat(NULL);

  if (bApplyChanges)
  {
    LogWaveFormat(pwfx, "TS - applying ");

    AM_MEDIA_TYPE tmp;
    HRESULT result = CreateAudioMediaType((WAVEFORMATEX*)pwfx, &tmp, true);
    if (SUCCEEDED(result))
    {
      if (m_pMediaType)
        DeleteMediaType(m_pMediaType);
      m_pMediaType = CreateMediaType(&tmp);
    }

    SetInputFormat(pwfx);
    SetOutputFormat(pwfx);
    SetFormat(pwfx);
  }
  else
    LogWaveFormat(pwfx, "TS - ");

  m_chOrder = *pChOrder;

  return S_OK;
}
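// The filter above follows a common negotiation pattern: validate the proposed
// format, short-circuit if it is already in use, let the next sink in the chain
// accept it, and only then apply it locally. Below is a minimal, self-contained
// sketch of that pattern; the Format struct, ISink interface, and ChainSink
// class are hypothetical stand-ins, not part of the DirectShow code above.

#include <iostream>
#include <memory>

struct Format {
    int bitsPerSample;
    int sampleRate;
    bool operator==(const Format& o) const {
        return bitsPerSample == o.bitsPerSample && sampleRate == o.sampleRate;
    }
};

class ISink {
public:
    virtual ~ISink() = default;
    virtual bool NegotiateFormat(const Format& fmt, bool applyChanges) = 0;
};

class ChainSink : public ISink {
public:
    explicit ChainSink(std::shared_ptr<ISink> next) : m_next(std::move(next)) {}

    bool NegotiateFormat(const Format& fmt, bool applyChanges) override {
        if (fmt.bitsPerSample != 16)          // validate locally first
            return false;
        if (m_haveFormat && fmt == m_format)  // already using this format
            return true;
        if (m_next && !m_next->NegotiateFormat(fmt, applyChanges))
            return false;                     // downstream rejected it
        if (applyChanges) {                   // apply only after the chain agreed
            m_format = fmt;
            m_haveFormat = true;
        }
        return true;
    }

private:
    std::shared_ptr<ISink> m_next;
    Format m_format{};
    bool m_haveFormat = false;
};

int main() {
    auto tail = std::make_shared<ChainSink>(nullptr);
    ChainSink head(tail);
    std::cout << head.NegotiateFormat({16, 48000}, true) << "\n"; // 1: accepted by the chain
    std::cout << head.NegotiateFormat({32, 48000}, true) << "\n"; // 0: rejected locally
    return 0;
}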
bool ReadTagAPE(FILE_INFO *pFileMP3)
{
    GetValues_mp3infp(pFileMP3);

    CTag_Ape ape;
    if (ape.Load(GetFullPath(pFileMP3)) != ERROR_SUCCESS)
    {
        return false;
    }

    CString buff;
    // Track title
    ape.GetComment(CTag_Ape::APE_TAG_FIELD_TITLE, buff);
    SetTrackNameSI(pFileMP3, buff);
    // Artist name
    ape.GetComment(CTag_Ape::APE_TAG_FIELD_ARTIST, buff);
    SetArtistNameSI(pFileMP3, buff);
    // Album name
    ape.GetComment(CTag_Ape::APE_TAG_FIELD_ALBUM, buff);
    SetAlbumNameSI(pFileMP3, buff);
    // Release year
    ape.GetComment(CTag_Ape::APE_TAG_FIELD_YEAR, buff);
    SetYearSI(pFileMP3, buff);
    // Comment
    ape.GetComment(CTag_Ape::APE_TAG_FIELD_COMMENT, buff);
    SetCommentSI(pFileMP3, buff);
    // Track number
    ape.GetComment(CTag_Ape::APE_TAG_FIELD_TRACK, buff);
    SetTrackNumberSI(pFileMP3, buff);
    // Genre
    ape.GetComment(CTag_Ape::APE_TAG_FIELD_GENRE, buff);
    SetGenreSI(pFileMP3, buff);
    //SetBGenre(STEPGetGenreCode(buff));
    // Composer
    ape.GetComment("Composer", buff);
    SetComposerSI(pFileMP3, buff);
    // Performer
    ape.GetComment("Performer", buff);
    SetOrigArtistSI(pFileMP3, buff);

    // File type: APE
    SetFileTypeName(pFileMP3, "Monkey's Audio");
    if (!ape.HasApetag() && ape.HasId3tag())
    {
        SetFormat(pFileMP3, nFileTypeAPEID3);
        SetFileTypeName(pFileMP3, "Monkey's Audio(ID3)");
    }
    else
    {
        SetFormat(pFileMP3, nFileTypeAPE);
        if (ape.HasApetag())
        {
            if (ape.isApetagV1())
            {
                SetFileTypeName(pFileMP3, "Monkey's Audio(APE)");
            }
            else
            {
                SetFileTypeName(pFileMP3, "Monkey's Audio(APEv2)");
            }
        }
    }
    return true;
}
OSStatus CAAudioUnit::ConfigureDynamicScope (AudioUnitScope inScope,
                                             UInt32         inNumElements,
                                             UInt32         *inChannelsPerElement,
                                             Float64        inSampleRate)
{
    SInt32 numChannels = 0;
    bool isDynamic = HasDynamicScope (inScope, numChannels);
    if (isDynamic == false)
        return kAudioUnitErr_InvalidProperty;

    // let's do a sanity check...
    // if numChannels == -1, then it can do "any"...
    if (numChannels > 0)
    {
        SInt32 count = 0;
        for (unsigned int i = 0; i < inNumElements; ++i)
            count += inChannelsPerElement[i];

        if (count > numChannels)
            return kAudioUnitErr_InvalidPropertyValue;
    }

    OSStatus result = SetElementCount (inScope, inNumElements);
    if (result)
        return result;

    CAStreamBasicDescription desc;
    desc.mSampleRate = inSampleRate;
    for (unsigned int i = 0; i < inNumElements; ++i)
    {
        desc.SetCanonical (inChannelsPerElement[i], false);
        result = SetFormat (inScope, i, desc);
        if (result)
            return result;
    }
    return noErr;
}
int TextWidget::Load(XmlSynthElem *elem)
{
    SynthWidget::Load(elem);

    char *txt;
    if (elem->GetAttribute("lbl", &txt) == 0)
    {
        SetText(txt);
        delete txt;
    }
    if (elem->GetAttribute("fmt", &txt) == 0)
    {
        SetFormat(txt);
        delete txt;
    }

    short th;
    if (elem->GetAttribute("th", th) == 0)
        SetTextHeight(th);
    if (elem->GetAttribute("bold", th) == 0)
        SetBold(th);
    if (elem->GetAttribute("italic", th) == 0)
        SetItalic(th);
    if (elem->GetAttribute("filled", th) == 0)
        SetFilled(th);
    if (elem->GetAttribute("align", th) == 0)
        SetAlign(th);
    if (elem->GetAttribute("shadow", th) == 0)
        SetShadow(th);
    if (elem->GetAttribute("inset", th) == 0)
        SetInset(th);
    if (elem->GetAttribute("edit", th) == 0)
        editable = (int) th;

    return 0;
}
void CuModule::SetSampler(CuModule::TexBind* texBind, const CuTexSamplerAttr& sampler)
{
    SetFormat(texBind, sampler.fmt, sampler.numPackedComponents);

    // Only push state to the driver when the cached value actually changes.
    if (texBind->sampler.addressX != sampler.addressX)
    {
        cuTexRefSetAddressMode(texBind->texRef, 0, sampler.addressX);
        texBind->sampler.addressX = sampler.addressX;
    }
    if (texBind->sampler.addressY != sampler.addressY)
    {
        cuTexRefSetAddressMode(texBind->texRef, 1, sampler.addressY);
        texBind->sampler.addressY = sampler.addressY;
    }
    if (texBind->sampler.addressZ != sampler.addressZ)
    {
        cuTexRefSetAddressMode(texBind->texRef, 2, sampler.addressZ);
        texBind->sampler.addressZ = sampler.addressZ;
    }
    if (texBind->sampler.filter != sampler.filter)
    {
        cuTexRefSetFilterMode(texBind->texRef, sampler.filter);
        texBind->sampler.filter = sampler.filter;
    }
    if ((texBind->sampler.readAsInteger != sampler.readAsInteger) ||
        (texBind->sampler.normCoord != sampler.normCoord))
    {
        uint flags =
            (sampler.readAsInteger ? CU_TRSF_READ_AS_INTEGER : 0) |
            (sampler.normCoord ? CU_TRSF_NORMALIZED_COORDINATES : 0);
        texBind->sampler.readAsInteger = sampler.readAsInteger;
        texBind->sampler.normCoord = sampler.normCoord;
        cuTexRefSetFlags(texBind->texRef, flags);
    }
}
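// The sampler code above caches the last state it pushed to the driver and
// issues a driver call only when a field actually changes. Below is a minimal,
// self-contained sketch of that caching-setter idea; the Driver struct and
// FilterMode enum are hypothetical stand-ins for the CUDA driver API calls
// used above, not part of CuModule itself.

#include <iostream>

enum class FilterMode { Point, Linear };

struct Driver {  // pretend hardware interface
    void SetFilterMode(FilterMode m) {
        std::cout << "driver call: filter=" << static_cast<int>(m) << "\n";
    }
};

class SamplerCache {
public:
    explicit SamplerCache(Driver& d) : m_driver(d) {}

    void SetFilter(FilterMode m) {
        if (m_filter == m)  // unchanged: skip the (relatively expensive) driver call
            return;
        m_driver.SetFilterMode(m);
        m_filter = m;       // remember what the hardware now holds
    }

private:
    Driver& m_driver;
    FilterMode m_filter = FilterMode::Point;
};

int main() {
    Driver d;
    SamplerCache cache(d);
    cache.SetFilter(FilterMode::Linear); // issues one driver call
    cache.SetFilter(FilterMode::Linear); // no-op, state unchanged
    cache.SetFilter(FilterMode::Point);  // issues another driver call
    return 0;
}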
CClipboard::CClipboard(HWND hWnd, LPCSTR Format)
    : m_hWnd(hWnd)
{
    m_hWnd = NULL;
    SetFormat(Format);
    memset(&m_Owner, 0, sizeof(GUID));
}
NTSTATUS WaveStream::Init(IN Wave *Miniport_,
                          IN PPORTWAVERTSTREAM PortStream_,
                          IN ULONG Pin_,
                          IN BOOLEAN Capture_,
                          IN PKSDATAFORMAT DataFormat)
{
    PAGED_CODE();

    debug("WaveStream[%d]::Init: [%p, %p]\n", Pin_, this, Miniport_);

    magic = object_magic;

    wave = Miniport_;
    Pin = Pin_;
    State = KSSTATE_STOP;
    PortStream = PortStream_;

    AudioPosition = 0;
    NotificationCount = 0;
    AudioBuffer = NULL;
    NotificationEvent = NULL;

    is_asio = false;
    is_16bit = false;
    is_recording = !!Capture_;
    enabled = false;

    NTSTATUS ntStatus = SetFormat(DataFormat);

    return ntStatus;
}
EditorCamera::EditorCamera(const std::string& name)
    : EditorSceneNode(name)
{
    SetMesh(Mesh::GetOrCreate<QuadMesh>("NSGEditorCamera"));
    SetMaterial(Material::GetOrCreate("NSGEditorCamera"));
    material_->SetRenderPass(RenderPass::TEXT);
    material_->SetDiffuseColor(COLOR_DODGER_BLUE);
    material_->EnableTransparent(true);
    material_->SetAlpha(0.9f);
    material_->SetBillboardType(BillboardType::SPHERICAL);
    material_->CastShadow(false);
    material_->ReceiveShadows(false);

    // 8x8 alpha bitmap used as the camera icon. X and O are expected to be
    // macros defined elsewhere in this file that expand to opaque/transparent
    // byte values (each followed by a comma).
    const int SIZE = 8;
    static const unsigned char image[SIZE][SIZE]
    {
        {X X X X X X X X},
        {X O O O O O O X},
        {X O X X X X O X},
        {X O X O O O O X},
        {X O X O O O O X},
        {X O X X X X O X},
        {X O O O O O O X},
        {X X X X X X X X},
    };

    auto texture = std::make_shared<Texture2D>();
    texture->SetFormat(GL_ALPHA);
    texture->SetData(&image[0][0]);
    texture->SetSize(SIZE, SIZE);
    texture->SetMapType(TextureType::COL);
    material_->SetTextMap(texture);
    texture->SetFilterMode(TextureFilterMode::NEAREST);
}
void RichQtfParser::EndPart()
{
    if(istable) {
        if(paragraph.GetCount() == 0 && text.GetCount() == 0) {
            if(table.GetCount())
                table.Top().text.CatPick(pick(tablepart));
            else
                target.CatPick(pick(tablepart));
        }
        else {
            paragraph.part.Clear();
            text.Clear();
        }
    }
    else {
        Flush();
        if(table.GetCount())
            table.Top().text.Cat(paragraph, target.GetStyles());
        else {
            if(breakpage)
                paragraph.format.newpage = true;
            target.Cat(paragraph);
        }
        paragraph.part.Clear();
        SetFormat();
        breakpage = false;
    }
    istable = false;
}
//---------------------------------------------------------------------------------------
void tNdp2kTableDataSources::LoadFromGlobalSettings()
{
    SetFormat( m_DefaultFormat );

    int errors = 0;
    m_Table.resize( m_DefaultFormatRows );
    for (int row = 0; row < m_DefaultFormatRows; ++row)
    {
        tDataType type = m_pDefaultFormatTypes[row];
        eDigitalDataSourceSettingId settingID = m_pSourceSettings->DataTypeToSettingId( type );

        tRow& info = m_Table[ row ];
        info.dataType = type;
        info.dirty = false;

        bool ok = false;
        if (m_pSourceSettings->SettingIdToDataType( settingID ) == type)
        {
            tDigitalDataSourceSettings::tGlobalSetting setting(*m_pSourceSettings);
            if (setting.Load( settingID ))
            {
                info.selections = setting.DataSourceList();
                ok = true;
            }
        }

        if (ok == false)
        {
            info.selections = tSourceSelections();
            ++errors;
        }
    }
}
TLvColumn::TLvColumn(const tstring& text, int width, TFormat how, int subitemIndex)
{
  Init();
  SetText(text);
  SetWidth(width, text);
  SetFormat(how);
  SetSubItem(subitemIndex);
}
wxGridCellFloatRenderer::wxGridCellFloatRenderer(int width, int precision, int format)
{
    SetWidth(width);
    SetPrecision(precision);
    SetFormat(format);
}
CTrackMgrClient::CTrackMgrClient(const string& host, unsigned int port)
    : m_HostType(eHost_port),
      m_Host(host),
      m_Port(port)
{
    SetFormat(eSerial_AsnBinary);
    x_Init();
}
void CFolderItem::GetDataFromDatabase(const TCHAR *szNotFound)
{
    char szSettingName[256];
    strcpy_s(szSettingName, _countof(szSettingName), m_szSection);
    strcat_s(szSettingName, _countof(szSettingName), m_szName);

    ptrT tszValue(db_get_tsa(NULL, ModuleName, szSettingName));
    SetFormat(tszValue != NULL ? tszValue : szNotFound);
}
int EncoderMFC::Initialize(int width, int height, int codec, float rate, int bitrate,
                           int in_bufs, int out_bufs, int stream_b_size)
{
    if (state != MFC_ST_OPEN)
        return -1;
    if (SetFormat(width, height) < 0)
        return -1;
    if (SetCodec(codec, stream_b_size) < 0)
        return -1;
    SetRate(rate);
    if (bitrate > 0)
        SetBitRate(bitrate);
    if (InitBuffers(DIR_IN, in_bufs) < 0)
        return -1;
    if (InitBuffers(DIR_OUT, out_bufs) < 0)
        return -1;

    // Queue all output (capture) buffers before streaming starts.
    for (int bi = 0; bi < out_bufs; bi++) {
        struct v4l2_buffer buf;
        struct v4l2_plane planes[MFC_MAX_PLANES];
        memset(&buf, 0, sizeof(buf));      // clear the request before filling it in
        memset(planes, 0, sizeof(planes));
        buf.type = io_dir_to_type(DIR_OUT);
        buf.memory = V4L2_MEMORY_MMAP;
        buf.m.planes = planes;
        buf.length = NPPBuf[DIR_OUT];
        buf.index = bi;
        for (u_int i = 0; i < NPPBuf[DIR_OUT]; i++) {
            planes[i].length = PlanesLen[DIR_OUT][i];
            planes[i].bytesused = 0;
            planes[i].m.userptr = (u_long)BufAddr[DIR_OUT][bi][i];
        }
        int ret = ioctl(fd, VIDIOC_QBUF, &buf);
        if (ret != 0) {
            printf("\nEncoderMFC: Init, Queue buffer %d error! %s\n", buf.index, strerror(errno));
        }
    }
    BufInx[DIR_OUT] = 0;
    state = MFC_ST_OK;
    return 0;
}
ExportFFmpeg::ExportFFmpeg()
:  ExportPlugin()
{
   mEncFormatCtx = NULL;       // libavformat's context for our output file
   mEncFormatDesc = NULL;      // describes our output file to libavformat
   mEncAudioStream = NULL;     // the output audio stream (may remain NULL)
   mEncAudioCodecCtx = NULL;   // the encoder for the output audio stream
   mEncAudioEncodedBuf = NULL; // buffer to hold frames encoded by the encoder
#define MAX_AUDIO_PACKET_SIZE (128 * 1024)
   mEncAudioEncodedBufSiz = 4 * MAX_AUDIO_PACKET_SIZE;
   mEncAudioFifoOutBuf = NULL; // buffer to read _out_ of the FIFO into
   mSampleRate = 0;
   mSupportsUTF8 = true;

   PickFFmpegLibs(); // DropFFmpegLibs() call is in ExportFFmpeg::Destroy()

   int newfmt;
   // Adds export types from the export type list
   for (newfmt = 0; newfmt < FMT_LAST; newfmt++)
   {
      wxString shortname(ExportFFmpegOptions::fmts[newfmt].shortname);
      // Don't hide export types when there's no av-libs, and don't hide FMT_OTHER
      if (newfmt < FMT_OTHER && FFmpegLibsInst->ValidLibsLoaded())
      {
         // Format/Codec support is compiled in?
         AVOutputFormat *avoformat = FFmpegLibsInst->guess_format(shortname.mb_str(), NULL, NULL);
         AVCodec *avcodec = FFmpegLibsInst->avcodec_find_encoder(ExportFFmpegOptions::fmts[newfmt].codecid);
         if (avoformat == NULL || avcodec == NULL)
         {
            ExportFFmpegOptions::fmts[newfmt].compiledIn = false;
            continue;
         }
      }
      int fmtindex = AddFormat() - 1;
      SetFormat(ExportFFmpegOptions::fmts[newfmt].name, fmtindex);
      AddExtension(ExportFFmpegOptions::fmts[newfmt].extension, fmtindex);
      // For some types add other extensions
      switch (newfmt)
      {
      case FMT_M4A:
         AddExtension(wxString(wxT("3gp")), fmtindex);
         AddExtension(wxString(wxT("m4r")), fmtindex);
         AddExtension(wxString(wxT("mp4")), fmtindex);
         break;
      case FMT_WMA2:
         AddExtension(wxString(wxT("asf")), fmtindex);
         AddExtension(wxString(wxT("wmv")), fmtindex);
         break;
      default:
         break;
      }

      SetMaxChannels(ExportFFmpegOptions::fmts[newfmt].maxchannels, fmtindex);
      SetCanMetaData(ExportFFmpegOptions::fmts[newfmt].canmetadata, fmtindex);
      SetDescription(ExportFFmpegOptions::fmts[newfmt].description, fmtindex);
   }
}
ExportFLAC::ExportFLAC()
:  ExportPlugin()
{
   AddFormat();
   SetFormat(wxT("FLAC"), 0);
   AddExtension(wxT("flac"), 0);
   SetMaxChannels(FLAC__MAX_CHANNELS, 0);
   SetCanMetaData(true, 0);
   SetDescription(_("FLAC Files"), 0);
}
ExportMP2::ExportMP2()
:  ExportPlugin()
{
   AddFormat();
   SetFormat(wxT("MP2"), 0);
   AddExtension(wxT("mp2"), 0);
   SetMaxChannels(2, 0);
   SetCanMetaData(true, 0);
   SetDescription(_("MP2 Files"), 0);
}
CocaDataObject::CocaDataObject( const coca::INode* node, const coca::IAttribute* attribute )
{
    wxString formatId( "cocaData" );
    formatId << ::wxGetProcessId(); // no interprocess dnd for now
    SetFormat( formatId.c_str() );

    //memset( &_data, 0, sizeof( SData ) );
    _data.node = node;
    _data.attribute = attribute;
}
void CFolderItem::GetDataFromDatabase(const TCHAR *szNotFound)
{
    char name[256];
    strcpy_s(name, sizeof(name), m_szSection);
    strcat_s(name, sizeof(name), m_szName);

    TCHAR buffer[MAX_FOLDER_SIZE];
    GetStringFromDatabase(name, szNotFound, buffer, SIZEOF(buffer));
    SetFormat(buffer);
}
ExportOGG::ExportOGG()
:  ExportPlugin()
{
   AddFormat();
   SetFormat(wxT("OGG"), 0);
   AddExtension(wxT("ogg"), 0);
   SetMaxChannels(255, 0);
   SetCanMetaData(true, 0);
   SetDescription(_("Ogg Vorbis Files"), 0);
}
OSStatus CAAudioUnit::SetSampleRate (AudioUnitScope inScope, AudioUnitElement inEl, Float64 inRate)
{
    AudioStreamBasicDescription desc;
    OSStatus result = GetFormat (inScope, inEl, desc);
    if (result)
        return result;

    desc.mSampleRate = inRate;
    return SetFormat (inScope, inEl, desc);
}
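// SetSampleRate above (and SetNumberChannels further down) follow a
// read-modify-write pattern: fetch the current format description, change one
// field, and write the whole description back. Below is a minimal,
// self-contained sketch of that pattern; StreamFormat and FormatOwner are
// hypothetical stand-ins, not the Core Audio types used by CAAudioUnit.

#include <iostream>

struct StreamFormat {
    double sampleRate = 44100.0;
    unsigned channels = 2;
};

class FormatOwner {
public:
    int GetFormat(StreamFormat& out) const { out = m_format; return 0; }
    int SetFormat(const StreamFormat& in)  { m_format = in; return 0; }

    // Change only the sample rate, preserving every other field.
    int SetSampleRate(double rate) {
        StreamFormat desc;
        int result = GetFormat(desc); // read the full current description
        if (result) return result;
        desc.sampleRate = rate;       // modify just the field of interest
        return SetFormat(desc);       // write the whole description back
    }

private:
    StreamFormat m_format;
};

int main() {
    FormatOwner unit;
    unit.SetSampleRate(48000.0);
    StreamFormat f;
    unit.GetFormat(f);
    std::cout << f.sampleRate << " Hz, " << f.channels << " channels\n"; // 48000 Hz, 2 channels
    return 0;
}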
BEGIN_NCBI_SCOPE
BEGIN_objects_SCOPE

CTrackMgrClient::CTrackMgrClient(const string& service)
    : m_HostType(eNamed_service)
{
    SetService(service);
    SetFormat(eSerial_AsnBinary);
    x_Init();
}
void * AfxGlImage::GlReadPixels(GLint x, GLint y, GLsizei width, GLsizei height,
                                GLenum format, GLenum type)
{
    void * newMemory = SetFormat(width, height, format, type);
    if (!newMemory)
        return 0;

    glReadPixels(x, y, width, height, format, type, newMemory);
    return newMemory;
}
/**
 * Initialize camera device and throw an exception in case of error
 */
void Camera::Initialize() throw (std::string) {
    /* Common output string in case of error */
    std::ostringstream output_message;

    /* Set image format */
    SetFormat(format_);

    /* Set streaming parameters */
    SetFps(format_);
    std::cout << "FPS: " << format_->fps << std::endl;

    /* Request buffers for video capture streaming */
    struct v4l2_requestbuffers request_buffers;
    memset(&request_buffers, 0, sizeof(request_buffers));
    request_buffers.count = 4;
    request_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    request_buffers.memory = V4L2_MEMORY_MMAP;

    if (xioctl(VIDIOC_REQBUFS, &request_buffers) == -1) {
        if (errno == EINVAL) {
            output_message << device_ << " doesn't support memory mapping";
            throw std::string(output_message.str());
        } else {
            throw std::string("Error in VIDIOC_REQBUFS");
        }
    }

    /* Check if we'll have enough image buffers */
    if (request_buffers.count < 2) {
        output_message << "Insufficient buffers in device " << device_;
        throw std::string(output_message.str());
    }

    buffers_ = (Buffer *) calloc(request_buffers.count, sizeof(*buffers_));
    if (buffers_ == NULL) {
        throw std::string("Not enough memory to allocate memory shared buffers");
    }

    /* Query and memory-map each buffer */
    for (int num_buffer = 0; num_buffer < (int) request_buffers.count; num_buffer++) {
        struct v4l2_buffer buffer;
        memset(&buffer, 0, sizeof(buffer));
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.index = num_buffer;

        if (xioctl(VIDIOC_QUERYBUF, &buffer) == -1) {
            throw std::string("Error in VIDIOC_QUERYBUF");
        }

        buffers_[num_buffer].size = buffer.length;
        buffers_[num_buffer].mem = mmap(NULL, buffer.length,
                                        PROT_READ | PROT_WRITE, MAP_SHARED,
                                        camera_fd_, buffer.m.offset);

        if (buffers_[num_buffer].mem == MAP_FAILED) {
            throw std::string("Error in mmap (MAP_FAILED)");
        }
    }

    /* Camera now ready to start streaming */
    initialized_ = true;
}
OSStatus CAAudioUnit::SetNumberChannels (AudioUnitScope inScope, AudioUnitElement inEl, UInt32 inChans)
{
    // set this as the output of the AU
    CAStreamBasicDescription desc;
    OSStatus result = GetFormat (inScope, inEl, desc);
    if (result)
        return result;

    desc.SetCanonical (inChans, desc.IsInterleaved());
    result = SetFormat (inScope, inEl, desc);
    return result;
}
void CGridCellDateTime::Init(DWORD dwStyle)
{
    m_dwStyle = dwStyle;
    SetTime(CTime::GetCurrentTime());
    SetFormat(DT_CENTER | DT_VCENTER | DT_SINGLELINE | DT_NOPREFIX
#ifndef _WIN32_WCE
              | DT_END_ELLIPSIS
#endif
              );
}
void HwSetWaveFormat (allegro_devc * devc, PWAVE_INFO WaveInfo, unsigned char Direction)
{
    if (!Direction)
        WaveStream = &CaptureStream;
    else
        WaveStream = &PlaybackStream;

    SetFormat (devc, *WaveStream, WaveInfo);
}