static STSStatus compute_signature(char* buffer, uint32_t bufferSize, char* paramsArray[], const uint32_t paramsCount, const char* accessKeySecret) { ////////////////////////////////////////////////////////////////////////// // sign uint32_t canonLen = 0; char canonicalizedQueryString[2048 * 3]; compose_canonicalized_query_string(canonicalizedQueryString, 2048 * 3, &canonLen, paramsArray, paramsCount); string_buffer(strToSign, 2048 * 3); string_buffer_initialize(strToSign); int fit; string_buffer_append(strToSign, "POST&%2F&", 9, fit); if (!fit) { return STSStatusUriTooLong; } string_buffer(percentTwice, 2048 * 3); string_buffer_initialize(percentTwice); percentEncode(percentTwice, canonicalizedQueryString, canonLen); string_buffer_append(strToSign, percentTwice, strlen(percentTwice), fit); //fprintf(stdout, "strToSign(%lu): %s\n", strlen(strToSign), strToSign); // Generate an HMAC-SHA-1 of the strToSign size_t akSecretLen = strlen(accessKeySecret); char newAccessKeySecret[akSecretLen + 1]; snprintf(newAccessKeySecret, akSecretLen+2, "%s&", accessKeySecret); unsigned char hmac[20]; STS_HMAC_SHA1(hmac, (unsigned char *) newAccessKeySecret, strlen(newAccessKeySecret), (unsigned char *) strToSign, strlen(strToSign)); // Now base-64 encode the results char b64[((20 + 1) * 4) / 3]; int b64Len = base64Encode(hmac, 20, b64); char b64Encoded[256]; if (!urlEncode(b64Encoded, b64, b64Len)) { return STSStatusUriTooLong; } snprintf(buffer, strlen(b64Encoded)+1, "%s", b64Encoded); return STSStatusOK; }
/* Recursively build the slash-terminated path of `de` into `path`,
 * root-first: walk up to the root, then append each ancestor's name
 * followed by "/" on the way back down.
 * NOTE(review): string_buffer_append appears to take ownership of its
 * argument (hence the strdup("/")); presumably direntry_get_name() also
 * transfers ownership of the returned string -- confirm, otherwise this
 * leaks or double-frees.  Each parent obtained via direntry_get_parent()
 * is released with direntry_delete() once its portion is appended. */
static void direntry_get_path_inner (direntry_t *de, string_buffer_t *path)
{
    direntry_t *parent;

    /* Ancestors first, so the names come out in root-to-leaf order. */
    if ((parent = direntry_get_parent(de)))
    {
        direntry_get_path_inner(parent, path);
        direntry_delete(CALLER_INFO parent);
    }

    string_buffer_append(path, direntry_get_name(de));
    string_buffer_append(path, strdup("/"));
}
/**
 * Return a newly-allocated copy of `haystack` in which every occurrence of
 * `needle` is replaced by `replacement`.
 *
 * Special case kept from the original: when both haystack and needle are
 * empty, the result is a copy of `replacement`.
 *
 * Caller owns (and must free) the returned string.
 */
char *
str_replace (const char *haystack, const char *needle, const char *replacement)
{
    assert (haystack != NULL);
    assert (needle != NULL);
    assert (replacement != NULL);

    string_buffer_t *sb = string_buffer_create ();

    size_t haystack_len = strlen (haystack);
    size_t needle_len = strlen (needle);

    // FIX: use size_t for the scan position; the original used int, which
    // is a signed/unsigned comparison against haystack_len and overflows
    // for inputs longer than INT_MAX.
    size_t pos = 0;
    while (pos < haystack_len) {
        if (needle_len > 0 && str_starts_with (&haystack[pos], needle)) {
            // Matched the needle: emit the replacement and skip past it.
            string_buffer_append_string (sb, replacement);
            pos += needle_len;
        } else {
            string_buffer_append (sb, haystack[pos]);
            pos++;
        }
    }

    // Empty haystack + empty needle => single replacement.
    if (needle_len == 0 && haystack_len == 0)
        string_buffer_append_string (sb, replacement);

    char *res = string_buffer_to_string (sb);
    string_buffer_destroy (sb);
    return res;
}
/* SAX-style XML element callback for CopyObject responses.  Character
 * data for LastModified is accumulated in the string buffer; ETag data
 * is appended (correctly offset by eTagReturnLen) into the caller's
 * eTagReturn buffer, failing with XmlParseFailure if it overflows. */
static S3Status copyObjectXmlCallback(const char *elementPath,
                                      const char *data, int dataLen,
                                      void *callbackData)
{
    CopyObjectData *coData = (CopyObjectData *) callbackData;

    int fit;

    if (data) {
        if (!strcmp(elementPath, "CopyObjectResult/LastModified")) {
            string_buffer_append(coData->lastModified, data, dataLen, fit);
        }
        else if (!strcmp(elementPath, "CopyObjectResult/ETag")) {
            if (coData->eTagReturnSize && coData->eTagReturn) {
                coData->eTagReturnLen +=
                    snprintf(&(coData->eTagReturn[coData->eTagReturnLen]),
                             coData->eTagReturnSize -
                             coData->eTagReturnLen - 1,
                             "%.*s", dataLen, data);
                if (coData->eTagReturnLen >= coData->eTagReturnSize) {
                    return S3StatusXmlParseFailure;
                }
            }
        }
    }

    /* Avoid compiler error about variable set but not used */
    (void) fit;

    return S3StatusOK;
}
/* SAX-style XML callback for the list-service (ListAllMyBuckets)
 * response.  Element character data is accumulated into per-field
 * string buffers; when a Bucket element closes (data == NULL), the
 * user's listServiceCallback is invoked with the completed record and
 * the per-bucket buffers are reset for the next one. */
static S3Status xmlCallback(const char *elementPath, const char *data,
                            int dataLen, void *callbackData)
{
    XmlCallbackData *cbData = (XmlCallbackData *) callbackData;

    int fit;

    if (data) {
        if (!strcmp(elementPath, "ListAllMyBucketsResult/Owner/ID")) {
            string_buffer_append(cbData->ownerId, data, dataLen, fit);
        }
        else if (!strcmp(elementPath,
                         "ListAllMyBucketsResult/Owner/DisplayName")) {
            string_buffer_append(cbData->ownerDisplayName, data, dataLen, fit);
        }
        else if (!strcmp(elementPath,
                         "ListAllMyBucketsResult/Buckets/Bucket/Name")) {
            string_buffer_append(cbData->bucketName, data, dataLen, fit);
        }
        else if (!strcmp
                 (elementPath,
                  "ListAllMyBucketsResult/Buckets/Bucket/CreationDate")) {
            string_buffer_append(cbData->creationDate, data, dataLen, fit);
        }
    }
    else {
        if (!strcmp(elementPath, "ListAllMyBucketsResult/Buckets/Bucket")) {
            // Parse date.  Assume ISO-8601 date format.
            time_t creationDate = parseIso8601Time(cbData->creationDate);

            // Make the callback - a bucket just finished
            S3Status status = (*(cbData->listServiceCallback))
                (cbData->ownerId, cbData->ownerDisplayName,
                 cbData->bucketName, creationDate, cbData->callbackData);

            // Reset the per-bucket accumulators for the next Bucket element.
            string_buffer_initialize(cbData->bucketName);
            string_buffer_initialize(cbData->creationDate);

            return status;
        }
    }

    /* Avoid compiler error about variable set but not used */
    (void) fit;

    return S3StatusOK;
}
/* SAX-style XML callback for CompleteMultipartUpload responses:
 * accumulates the Location and ETag element contents into the
 * commit callback data's string buffers. */
static S3Status commitMultipartResponseXMLcallback(const char *elementPath,
                                                   const char *data,
                                                   int dataLen,
                                                   void *callbackData)
{
    int fit;
    CommitMultiPartData *commit_data = (CommitMultiPartData *) callbackData;

    if (data) {
        if (!strcmp(elementPath, "CompleteMultipartUploadResult/Location")) {
            string_buffer_append(commit_data->location, data, dataLen, fit);
        }
        else if (!strcmp(elementPath, "CompleteMultipartUploadResult/ETag")) {
            string_buffer_append(commit_data->etag, data, dataLen, fit);
        }
    }
    /* Avoid compiler error about variable set but not used */
    (void) fit;
    return S3StatusOK;
}
/* Data callback that accumulates raw response bytes into the bucket
 * lifecycle XML document buffer; fails with XmlDocumentTooLarge once
 * the buffer can no longer hold the incoming chunk. */
static S3Status getBlsDataCallback(int bufferSize, const char *buffer,
                                   void *callbackData)
{
    GetBlsData *gsData = (GetBlsData *) callbackData;

    int appended;
    string_buffer_append(gsData->blsXmlDocument, buffer, bufferSize, appended);

    if (!appended) {
        return S3StatusXmlDocumentTooLarge;
    }
    return S3StatusOK;
}
/**
 * Receive a single indexnode advert packet from `socket`, parse it, and —
 * if it parses and the source address converts to a host string — invoke
 * packet_received_cb with a heap-allocated host plus the parsed port,
 * fs2protocol and id fields.
 *
 * @param socket              the bound UDP socket to read from
 * @param sa                  storage for the sender's address
 * @param socklen_in          size of the `sa` storage
 * @param addr_src            the in_addr to render with inet_ntop
 * @param host_len            size of the host-string buffer
 * @param packet_received_cb  callback for successfully parsed adverts
 * @param packet_received_ctxt opaque context passed to the callback
 */
static void receive_advert(
    const int socket,
    struct sockaddr *sa,
    const socklen_t socklen_in,
    void * const addr_src,
    const size_t host_len,
    new_indexnode_event_t packet_received_cb,
    void *packet_received_ctxt
)
{
    char host[host_len];
    char buf[1024];
    char *port, *fs2protocol, *id;
    string_buffer_t *buffer = string_buffer_new();
    int recv_rc;
    /* FIX: recvfrom() requires the address-length argument to be
     * initialised to the size of the sa buffer on entry (it is a
     * value-result parameter); it was previously uninitialised, which is
     * undefined behaviour and made the assert below meaningless. */
    socklen_t socklen = socklen_in;


    /* For UDP it is specified that recv*() will return the whole packet in one
     * go. It is not correct to keep calling recv*() to get more of the message;
     * this isn't a stream. If the message is too big for the buffer it's simply
     * truncated. Usually silently, but by passing in MSG_TRUNC one gets the
     * real length of the message back, even if it has to be truncated. This
     * allows us to assert that our buffer is big enough. We've had to take a
     * guess because advert packets are variable length, but it's an assertion
     * because 1024 really should be enough */
    errno = 0;
    recv_rc = recvfrom(socket, buf, sizeof(buf) - 1, MSG_TRUNC, sa, &socklen);
    assert(recv_rc <= (int)sizeof(buf) - 1);

    if (recv_rc >= 0)
    {
        assert(socklen == socklen_in);
        buf[recv_rc] = '\0';

        /* string_buffer_append takes ownership, hence the strdup. */
        string_buffer_append(buffer, strdup(buf));

        if (!parse_advert_packet(string_buffer_peek(buffer), &port, &fs2protocol, &id) &&
            inet_ntop(AF_INET, addr_src, host, sizeof(host) - 1))
        {
            packet_received_cb(packet_received_ctxt, strdup(host), port, fs2protocol, id);
        }
    }
    else
    {
        trace_warn("failed to recvfrom() and indexnode advert packet: %s\n", strerror(errno));
    }

    string_buffer_delete(buffer);
}
/* SAX-style XML callback for InitiateMultipartUpload responses:
 * accumulates the UploadId element contents into the callback data. */
static S3Status initialMultipartXmlCallback(const char *elementPath,
                                            const char *data, int dataLen,
                                            void *callbackData)
{
    InitialMultipartData *mdata = (InitialMultipartData *) callbackData;
    int fit;
    if (data) {
        if (!strcmp(elementPath, "InitiateMultipartUploadResult/UploadId")) {
            string_buffer_append(mdata->upload_id,data, dataLen, fit);
        }
    }
    /* Avoid compiler error about variable set but not used */
    (void) fit;
    return S3StatusOK;
}
/* SAX-style XML callback for a test-bucket (get location) response:
 * accumulates the LocationConstraint element contents. */
static S3Status testBucketXmlCallback(const char *elementPath,
                                      const char *data, int dataLen,
                                      void *callbackData)
{
    TestBucketData *tbData = (TestBucketData *) callbackData;

    int fit;

    if (data) {
        if (strcmp(elementPath, "LocationConstraint") == 0) {
            string_buffer_append(tbData->locationConstraint, data, dataLen,
                                 fit);
        }
    }

    /* Avoid compiler error about variable set but not used */
    (void) fit;

    return S3StatusOK;
}
/*
 * get the next match from the current position,
 * throught the dictionary.
 * this will return all the matchs.
 *
 * @param friso the friso instance (supplies the dictionary and max_len)
 * @param task  the segmentation task; task->buffer holds the first CJK char
 * @param idx   the read position (a local copy -- task->idx is not advanced)
 * @return friso_array_t that contains all the matchs.
 */
__STATIC_API__ friso_array_t get_next_match(
        friso_t friso, friso_task_t task, uint_t idx )
{
    register uint_t t;
    /* Candidate word buffer, seeded with the character already read;
     * it grows by one CJK character per loop iteration. */
    string_buffer_t sb = new_string_buffer_with_string( task->buffer );

    //create a match dynamic array.
    friso_array_t match = new_array_list_with_opacity( friso->max_len );
    /* The single starting character is always the first candidate. */
    array_list_add( match,
            friso_dic_get( friso->dic, __LEX_CJK_WORDS__, task->buffer ) );

    /* Extend the candidate up to max_len characters, stopping at
     * whitespace or the first non-CJK character. */
    for ( t = 1;
          t < friso->max_len
              && ( task->bytes = read_next_word( task, &idx, task->buffer ) ) != 0;
          t++ )
    {
        task->unicode = get_utf8_unicode( task->buffer );
        if ( utf8_whitespace( task->unicode ) ) break;
        if ( ! utf8_cjk_string( task->unicode ) ) break;

        //append the task->buffer to the buffer.
        string_buffer_append( sb, task->buffer );

        //check the CJK dictionary.
        if ( friso_dic_match( friso->dic, __LEX_CJK_WORDS__, sb->buffer ) )
        {
            /*
             * add the lex_entry_t insite.
             * here is a key point:
             * we use friso_dic_get function to get the address of the
             * lex_entry_cdt that store in the dictionary, not create a new
             * lex_entry_cdt.
             * so :
             *   1. we will not bother to the allocations of the newly
             *      created lex_entry_cdt.
             *   2. more efficient of course.
             */
            array_list_add( match,
                    friso_dic_get( friso->dic, __LEX_CJK_WORDS__, sb->buffer ) );
        }
    }

    /*buffer allocations clear*/
    free_string_buffer( sb );
    //array_list_trim( match );

    return match;
}
//get the next cjk word from the current position, with simple mode. __STATIC_API__ friso_hits_t next_simple_cjk( friso_t friso, friso_task_t task ) { uint_t t, idx = task->idx, __length__; string_buffer_t sb = new_string_buffer_with_string( task->buffer ); lex_entry_t e = friso_dic_get( friso->dic, __LEX_CJK_WORDS__, sb->buffer ); /* * here bak the e->length in the task->hits->type. * we will use it to count the task->idx. * for the sake of use less variable. */ __length__ = e->length; for ( t = 1; t < friso->max_len && ( task->bytes = read_next_word( task, &idx, task->buffer ) ) != 0; t++ ) { task->unicode = get_utf8_unicode( task->buffer ); if ( utf8_whitespace( task->unicode ) ) break; if ( ! utf8_cjk_string( task->unicode ) ) break; string_buffer_append( sb, task->buffer ); //check the existence of the word by search the dictionary. if ( friso_dic_match( friso->dic, __LEX_CJK_WORDS__, sb->buffer ) ) { e = friso_dic_get( friso->dic, __LEX_CJK_WORDS__, sb->buffer ); } } //correct the offset of the segment. task->idx += ( e->length - __length__ ); free_string_buffer( sb ); //free the buffer //reset the hits. task->hits->word = e->word; task->hits->type = __FRISO_SYS_WORDS__; return task->hits; }
/**
 * Split `str` on `delim`, returning a zarray of newly-allocated char*
 * parts.  Empty parts (from leading, trailing, or repeated delimiters)
 * are never added.  Caller owns the array and each string in it.
 */
zarray_t *
str_split (const char *str, const char *delim)
{
    assert (str != NULL);
    assert (delim != NULL);

    zarray_t *parts = zarray_create (sizeof(char*));
    string_buffer_t *sb = string_buffer_create ();

    size_t delim_len = strlen (delim);
    size_t len = strlen (str);
    size_t pos = 0;

    while (pos < len) {
        // FIX (consistency with str_replace): test delim_len > 0 before
        // calling str_starts_with -- the cheap guard short-circuits the
        // prefix scan, and an empty delimiter would otherwise match every
        // position only to be rejected afterwards.
        if (delim_len > 0 && str_starts_with (&str[pos], delim)) {
            pos += delim_len;

            // never add empty strings (repeated tokens)
            if (string_buffer_size (sb) > 0) {
                char *part = string_buffer_to_string (sb);
                zarray_add (parts, &part);
            }
            string_buffer_reset (sb);
        } else {
            string_buffer_append (sb, str[pos]);
            pos++;
        }
    }

    // Flush the final part, if any.
    if (string_buffer_size(sb) > 0) {
        char *part = string_buffer_to_string (sb);
        zarray_add (parts, &part);
    }

    string_buffer_destroy (sb);
    return parts;
}
/* SAX-style XML callback that converts an AccessControlPolicy document
 * into an array of S3AclGrant structures.  Character data fills the
 * owner fields (snprintf, offset-appended) and the per-grant string
 * buffers; when a Grant element closes (data == NULL) a grant is
 * classified by which grantee field was populated, its permission is
 * mapped, and the per-grant buffers are reset for the next Grant. */
static S3Status convertAclXmlCallback(const char *elementPath,
                                      const char *data, int dataLen,
                                      void *callbackData)
{
    ConvertAclData *caData = (ConvertAclData *) callbackData;

    int fit;

    if (data) {
        if (!strcmp(elementPath, "AccessControlPolicy/Owner/ID")) {
            caData->ownerIdLen +=
                snprintf(&(caData->ownerId[caData->ownerIdLen]),
                         S3_MAX_GRANTEE_USER_ID_SIZE - caData->ownerIdLen - 1,
                         "%.*s", dataLen, data);
            if (caData->ownerIdLen >= S3_MAX_GRANTEE_USER_ID_SIZE) {
                return S3StatusUserIdTooLong;
            }
        }
        else if (!strcmp(elementPath, "AccessControlPolicy/Owner/"
                         "DisplayName")) {
            caData->ownerDisplayNameLen +=
                snprintf(&(caData->ownerDisplayName
                           [caData->ownerDisplayNameLen]),
                         S3_MAX_GRANTEE_DISPLAY_NAME_SIZE -
                         caData->ownerDisplayNameLen - 1,
                         "%.*s", dataLen, data);
            if (caData->ownerDisplayNameLen >=
                S3_MAX_GRANTEE_DISPLAY_NAME_SIZE) {
                return S3StatusUserDisplayNameTooLong;
            }
        }
        else if (!strcmp(elementPath,
                         "AccessControlPolicy/AccessControlList/Grant/"
                         "Grantee/EmailAddress")) {
            // AmazonCustomerByEmail
            string_buffer_append(caData->emailAddress, data, dataLen, fit);
            if (!fit) {
                return S3StatusEmailAddressTooLong;
            }
        }
        else if (!strcmp(elementPath,
                         "AccessControlPolicy/AccessControlList/Grant/"
                         "Grantee/ID")) {
            // CanonicalUser
            string_buffer_append(caData->userId, data, dataLen, fit);
            if (!fit) {
                return S3StatusUserIdTooLong;
            }
        }
        else if (!strcmp(elementPath,
                         "AccessControlPolicy/AccessControlList/Grant/"
                         "Grantee/DisplayName")) {
            // CanonicalUser
            string_buffer_append(caData->userDisplayName, data, dataLen, fit);
            if (!fit) {
                return S3StatusUserDisplayNameTooLong;
            }
        }
        else if (!strcmp(elementPath,
                         "AccessControlPolicy/AccessControlList/Grant/"
                         "Grantee/URI")) {
            // Group
            string_buffer_append(caData->groupUri, data, dataLen, fit);
            if (!fit) {
                return S3StatusGroupUriTooLong;
            }
        }
        else if (!strcmp(elementPath,
                         "AccessControlPolicy/AccessControlList/Grant/"
                         "Permission")) {
            // Permission
            string_buffer_append(caData->permission, data, dataLen, fit);
            if (!fit) {
                return S3StatusPermissionTooLong;
            }
        }
    }
    else {
        if (!strcmp(elementPath, "AccessControlPolicy/AccessControlList/"
                    "Grant")) {
            // A grant has just been completed; so add the next S3AclGrant
            // based on the values read
            if (*(caData->aclGrantCountReturn) == S3_MAX_ACL_GRANT_COUNT) {
                return S3StatusTooManyGrants;
            }

            S3AclGrant *grant = &(caData->aclGrants
                                  [*(caData->aclGrantCountReturn)]);

            // Grantee type is inferred from which field(s) were populated:
            // email > canonical user (needs both id and display name) > group.
            if (caData->emailAddress[0]) {
                grant->granteeType = S3GranteeTypeAmazonCustomerByEmail;
                strcpy(grant->grantee.amazonCustomerByEmail.emailAddress,
                       caData->emailAddress);
            }
            else if (caData->userId[0] && caData->userDisplayName[0]) {
                grant->granteeType = S3GranteeTypeCanonicalUser;
                strcpy(grant->grantee.canonicalUser.id, caData->userId);
                strcpy(grant->grantee.canonicalUser.displayName,
                       caData->userDisplayName);
            }
            else if (caData->groupUri[0]) {
                if (!strcmp(caData->groupUri, ACS_GROUP_AWS_USERS)) {
                    grant->granteeType = S3GranteeTypeAllAwsUsers;
                }
                else if (!strcmp(caData->groupUri, ACS_GROUP_ALL_USERS)) {
                    grant->granteeType = S3GranteeTypeAllUsers;
                }
                else if (!strcmp(caData->groupUri, ACS_GROUP_LOG_DELIVERY)) {
                    grant->granteeType = S3GranteeTypeLogDelivery;
                }
                else {
                    return S3StatusBadGrantee;
                }
            }
            else {
                return S3StatusBadGrantee;
            }

            // Map the textual permission onto the enum.
            if (!strcmp(caData->permission, "READ")) {
                grant->permission = S3PermissionRead;
            }
            else if (!strcmp(caData->permission, "WRITE")) {
                grant->permission = S3PermissionWrite;
            }
            else if (!strcmp(caData->permission, "READ_ACP")) {
                grant->permission = S3PermissionReadACP;
            }
            else if (!strcmp(caData->permission, "WRITE_ACP")) {
                grant->permission = S3PermissionWriteACP;
            }
            else if (!strcmp(caData->permission, "FULL_CONTROL")) {
                grant->permission = S3PermissionFullControl;
            }
            else {
                return S3StatusBadPermission;
            }

            (*(caData->aclGrantCountReturn))++;

            // Reset the per-grant accumulators for the next Grant element.
            string_buffer_initialize(caData->emailAddress);
            string_buffer_initialize(caData->userId);
            string_buffer_initialize(caData->userDisplayName);
            string_buffer_initialize(caData->groupUri);
            string_buffer_initialize(caData->permission);
        }
    }

    return S3StatusOK;
}
/* Get the next latin (English/number) word from the current position.
 * Reads letters/digits until a delimiter, strips trailing English
 * punctuation, then — if the scan stopped on a CJK character — tries to
 * extend the word into a Chinese-English mixed dictionary word, or to
 * absorb a single Chinese measure unit after a numeric string. */
__STATIC_API__ friso_hits_t next_basic_latin( friso_t friso, friso_task_t task )
{
    // __convert == 1 means a mixed-word dictionary entry was matched.
    char __convert = 0, t = 0;
    string_buffer_t sb, temp;
    lex_entry_t e = NULL;

    //full-half width and upper-lower case exchange.
    task->unicode = get_utf8_unicode( task->buffer );
    ___LATAIN_FULL_UPPER_CHECK___

    //creat a new string buffer and append the task->buffer insite.
    sb = new_string_buffer_with_string( task->buffer );

    //segmentation: consume letters/digits until a delimiter is hit.
    while ( ( task->bytes = read_next_word( task, &task->idx, task->buffer ) ) != 0 )
    {
        task->unicode = get_utf8_unicode( task->buffer );

        if ( utf8_whitespace( task->unicode ) ) break;
        if ( utf8_en_punctuation( task->unicode )
                && ! utf8_keep_punctuation( task->buffer ) ) break;
        if ( ! ( utf8_halfwidth_letter_digit( task->unicode )
                || utf8_fullwidth_letter_digit( task->unicode ) ) )
        {
            // Not latin: push the character back and remember (t == 1)
            // that we stopped on a potential CJK continuation.
            task->idx -= task->bytes;
            t = 1;
            break;
        }
        //full-half width and upper-lower case convert
        ___LATAIN_FULL_UPPER_CHECK___
        //append the word the buffer.
        string_buffer_append( sb, task->buffer );
    }

    /*clear the useless english punctuation from the end of the buffer
     * ('%' is deliberately kept, e.g. for percentages).*/
    for ( ; sb->length > 0
            && sb->buffer[ sb->length - 1 ] != '%'
            && is_en_punctuation( sb->buffer[ sb->length - 1 ] ); )
    {
        sb->buffer[ --sb->length ] = '\0';
    }

    /*
     * find the chinese or english mixed word.
     * or single chinese units.*/
    if ( t == 1 )
    {
        if ( utf8_cjk_string( task->unicode ) )
        {
            //temp string buffer used to probe the mixed-word dictionary.
            temp = new_string_buffer_with_string( sb->buffer );
            for ( t = 0;
                    t < friso->mix_len
                    && ( task->bytes = read_next_word( task , &task->idx, task->buffer ) ) != 0;
                    t++ )
            {
                task->unicode = get_utf8_unicode( task->buffer );
                if ( ! utf8_cjk_string( task->unicode ) )
                {
                    // Not CJK any more: push it back and stop probing.
                    task->idx -= task->bytes;
                    break;
                }
                string_buffer_append( temp, task->buffer );
                //check the mixed word dictionary; keep the longest match.
                if ( friso_dic_match( friso->dic, __LEX_MIX_WORDS__, temp->buffer ) )
                {
                    __convert = 1;
                    //get the lexicon entry from the dictionary.
                    e = friso_dic_get( friso->dic, __LEX_MIX_WORDS__, temp->buffer );
                }
            }

            //correct the segmentation offset: rewind past whatever the
            //probe consumed beyond the accepted word (the full mixed word
            //if matched, otherwise just the latin part).
            task->idx -= ( temp->length - ( e == NULL ? sb->length : e->length ) );
            free_string_buffer( temp );

            //no match for mix word, try to find a single chinese unit.
            if ( __convert == 0 )
            {
                //check if it is string made up with numeric
                if ( utf8_numeric_string( sb->buffer )
                        && ( task->bytes = read_next_word( task, &task->idx, task->buffer ) ) != 0 )
                {
                    //check the single chinese units dictionary.
                    if ( friso_dic_match( friso->dic, __LEX_CJK_UNITS__, task->buffer ) )
                    {
                        string_buffer_append( sb, task->buffer );
                    } else {
                        task->idx -= task->bytes;
                    }
                }
            }    //end convert condition
        }
    }

    if ( __convert == 1 )
    {
        // Mixed word matched: hand back the dictionary entry's word.
        free_string_buffer( sb );
        task->hits->word = e->word;
        task->hits->type = __FRISO_SYS_WORDS__;
    } else {
        /*
         * adjust the string buffer.
         * here we do not trim the buffer cause its allocations will be free
         *     after the call of friso_next - sooner or later it will be released.
         * if your memory almost run out, you should call string_buffer_trim.
         * or we save the time to do the allocations and copy the buffer insite.
         */
        //string_buffer_trim( sb );
        task->hits->word = string_buffer_devote( sb );
        task->hits->type = __FRISO_NEW_WORDS__;
    }

    return task->hits;
}
static S3Status listBucketXmlCallback(const char *elementPath, const char *data, int dataLen, void *callbackData) { ListBucketData *lbData = (ListBucketData *) callbackData; int fit; if (data) { if (!strcmp(elementPath, "ListBucketResult/IsTruncated")) { string_buffer_append(lbData->isTruncated, data, dataLen, fit); } else if (!strcmp(elementPath, "ListBucketResult/NextMarker")) { string_buffer_append(lbData->nextMarker, data, dataLen, fit); } else if (!strcmp(elementPath, "ListBucketResult/Contents/Key")) { ListBucketContents *contents = &(lbData->contents[lbData->contentsCount]); string_buffer_append(contents->key, data, dataLen, fit); } else if (!strcmp(elementPath, "ListBucketResult/Contents/LastModified")) { ListBucketContents *contents = &(lbData->contents[lbData->contentsCount]); string_buffer_append(contents->lastModified, data, dataLen, fit); } else if (!strcmp(elementPath, "ListBucketResult/Contents/ETag")) { ListBucketContents *contents = &(lbData->contents[lbData->contentsCount]); string_buffer_append(contents->eTag, data, dataLen, fit); } else if (!strcmp(elementPath, "ListBucketResult/Contents/Size")) { ListBucketContents *contents = &(lbData->contents[lbData->contentsCount]); string_buffer_append(contents->size, data, dataLen, fit); } else if (!strcmp(elementPath, "ListBucketResult/Contents/Owner/ID")) { ListBucketContents *contents = &(lbData->contents[lbData->contentsCount]); string_buffer_append(contents->ownerId, data, dataLen, fit); } else if (!strcmp(elementPath, "ListBucketResult/Contents/Owner/DisplayName")) { ListBucketContents *contents = &(lbData->contents[lbData->contentsCount]); string_buffer_append (contents->ownerDisplayName, data, dataLen, fit); } else if (!strcmp(elementPath, "ListBucketResult/CommonPrefixes/Prefix")) { int which = lbData->commonPrefixesCount; lbData->commonPrefixLens[which] += snprintf(lbData->commonPrefixes[which], sizeof(lbData->commonPrefixes[which]) - lbData->commonPrefixLens[which] - 1, "%.*s", dataLen, data); 
if (lbData->commonPrefixLens[which] >= (int) sizeof(lbData->commonPrefixes[which])) { return S3StatusXmlParseFailure; } } } else { if (!strcmp(elementPath, "ListBucketResult/Contents")) { // Finished a Contents lbData->contentsCount++; if (lbData->contentsCount == MAX_CONTENTS) { // Make the callback S3Status status = make_list_bucket_callback(lbData); if (status != S3StatusOK) { return status; } initialize_list_bucket_data(lbData); } else { // Initialize the next one initialize_list_bucket_contents (&(lbData->contents[lbData->contentsCount])); } } else if (!strcmp(elementPath, "ListBucketResult/CommonPrefixes/Prefix")) { // Finished a Prefix lbData->commonPrefixesCount++; if (lbData->commonPrefixesCount == MAX_COMMON_PREFIXES) { // Make the callback S3Status status = make_list_bucket_callback(lbData); if (status != S3StatusOK) { return status; } initialize_list_bucket_data(lbData); } else { // Initialize the next one lbData->commonPrefixes[lbData->commonPrefixesCount][0] = 0; lbData->commonPrefixLens[lbData->commonPrefixesCount] = 0; } } } /* Avoid compiler error about variable set but not used */ (void) fit; return S3StatusOK; }
/*
 * load the lexicon configuration file.
 * and load all the valid lexicon from the configuration file.
 *
 * The configuration file format is a sequence of sections:
 *     <lexicon-type-key> : [
 *         <lexicon-file-name>;
 *         ...
 *     ]
 * Lines starting with '#' and empty lines are ignored.
 *
 * @param friso friso instance
 * @param config friso_config instance
 * @param _path dictionary directory
 * @param _limits words length limit
 */
FRISO_API void friso_dic_load_from_ifile(
        friso_t friso,
        friso_config_t config,
        fstring _path,
        uint_t _limits )
{
    //1.parse the configuration file.
    FILE *__stream;
    char __chars__[1024], __key__[30], *__line__;
    uint_t __length__, i, t;
    friso_lex_t lex_t;
    string_buffer_t sb;

    //get the lexicon configruation file path
    sb = new_string_buffer();
    string_buffer_append( sb, _path );
    string_buffer_append( sb, __FRISO_LEX_IFILE__ );
    //printf("%s\n", sb->buffer);

    if ( ( __stream = fopen( sb->buffer, "rb" ) ) != NULL )
    {
        while ( ( __line__ = file_get_line( __chars__, __stream ) ) != NULL )
        {
            //comment filter.
            if ( __line__[0] == '#' ) continue;
            if ( __line__[0] == '\0' ) continue;

            __length__ = strlen( __line__ );
            //item start: a line ending in '[' opens a lexicon-type section.
            if ( __line__[ __length__ - 1 ] == '[' )
            {
                //get the type key: skip leading blanks, then copy up to the
                //first blank or ':'.
                for ( i = 0; i < __length__
                        && ( __line__[i] == ' ' || __line__[i] == '\t' ); i++ );
                for ( t = 0; i < __length__; i++,t++ )
                {
                    if ( __line__[i] == ' '
                            || __line__[i] == '\t' || __line__[i] == ':' ) break;
                    __key__[t] = __line__[i];
                }
                __key__[t] = '\0';

                //get the lexicon type
                lex_t = get_lexicon_type_with_constant(__key__);
                if ( lex_t == -1 ) continue;

                //printf("key=%s, type=%d\n", __key__, lex_t );
                //read the file names inside the section until the ']' line.
                while ( ( __line__ = file_get_line( __chars__, __stream ) ) != NULL )
                {
                    //comments filter.
                    if ( __line__[0] == '#' ) continue;
                    if ( __line__[0] == '\0' ) continue;

                    __length__ = strlen( __line__ );
                    if ( __line__[ __length__ - 1 ] == ']' ) break;

                    //extract the file name: skip leading blanks, copy up to
                    //the first blank or ';'.
                    for ( i = 0; i < __length__
                            && ( __line__[i] == ' ' || __line__[i] == '\t' ); i++ );
                    for ( t = 0; i < __length__; i++,t++ )
                    {
                        if ( __line__[i] == ' '
                                || __line__[i] == '\t' || __line__[i] == ';' ) break;
                        __key__[t] = __line__[i];
                    }
                    __key__[t] = '\0';

                    //load the lexicon item from the lexicon file.
                    //(sb is reused to build "<_path><file name>")
                    string_buffer_clear( sb );
                    string_buffer_append( sb, _path );
                    string_buffer_append( sb, __key__ );
                    //printf("key=%s, type=%d\n", __key__, lex_t);
                    friso_dic_load( friso, config, lex_t, sb->buffer, _limits );
                }
            }
        }    //end while

        fclose( __stream );
    } else {
        printf("Warning: Fail to open the lexicon configuration file %s\n",
                sb->buffer);
    }

    free_string_buffer(sb);
}
/* SAX-style XML callback for ListParts (multipart upload) responses.
 * Character data is accumulated into the result fields or into the
 * current Part entry; when a Part element closes (data == NULL) the
 * part counter advances, flushing a batch to the user's callback when
 * the fixed-size parts array fills up. */
static S3Status listPartsXmlCallback(const char *elementPath,
                                     const char *data, int dataLen,
                                     void *callbackData)
{
    ListPartsData *lpData = (ListPartsData *) callbackData;

    int fit;

    if (data) {
        if (!strcmp(elementPath, "ListPartsResult/IsTruncated")) {
            string_buffer_append(lpData->isTruncated, data, dataLen, fit);
        }
        else if (!strcmp(elementPath,
                         "ListPartsResult/NextPartNumberMarker")) {
            string_buffer_append(lpData->nextPartNumberMarker, data, dataLen,
                                 fit);
        }
        else if (!strcmp(elementPath, "ListPartsResult/StorageClass")) {
            string_buffer_append(lpData->storageClass, data, dataLen, fit);
        }
        else if (!strcmp(elementPath, "ListPartsResult/Initiator/ID")) {
            string_buffer_append(lpData->initiatorId, data, dataLen, fit);
        }
        else if (!strcmp(elementPath,
                         "ListPartsResult/Initiator/DisplayName")) {
            string_buffer_append(lpData->initiatorDisplayName, data, dataLen,
                                 fit);
        }
        else if (!strcmp(elementPath, "ListPartsResult/Owner/ID")) {
            string_buffer_append(lpData->ownerId, data, dataLen, fit);
        }
        else if (!strcmp(elementPath, "ListPartsResult/Owner/DisplayName")) {
            string_buffer_append(lpData->ownerDisplayName, data, dataLen, fit);
        }
        else if (!strcmp(elementPath, "ListPartsResult/Part/PartNumber")) {
            ListPart *parts = &(lpData->parts[lpData->partsCount]);
            string_buffer_append(parts->partNumber, data, dataLen, fit);
        }
        else if (!strcmp(elementPath, "ListPartsResult/Part/LastModified")) {
            ListPart *parts = &(lpData->parts[lpData->partsCount]);
            string_buffer_append(parts->lastModified, data, dataLen, fit);
        }
        else if (!strcmp(elementPath, "ListPartsResult/Part/ETag")) {
            ListPart *parts = &(lpData->parts[lpData->partsCount]);
            string_buffer_append(parts->eTag, data, dataLen, fit);
        }
        else if (!strcmp(elementPath, "ListPartsResult/Part/Size")) {
            ListPart *parts = &(lpData->parts[lpData->partsCount]);
            string_buffer_append(parts->size, data, dataLen, fit);
        }
    }
    else {
        if (!strcmp(elementPath, "ListPartsResult/Part")) {
            // Finished a Contents
            lpData->partsCount++;
            if (lpData->partsCount == MAX_PARTS) {
                // Make the callback
                S3Status status = make_list_parts_callback(lpData);
                if (status != S3StatusOK) {
                    return status;
                }
                // Remember where the next batch starts before resetting.
                lpData->handlePartsStart += lpData->partsCount;
                initialize_list_parts_data(lpData);
            }
            else {
                // Initialize the next one
                initialize_list_part(&(lpData->parts[lpData->partsCount]));
            }
        }
    }

    /* Avoid compiler error about variable set but not used */
    (void) fit;

    return S3StatusOK;
}
static S3Status listMultipartXmlCallback(const char *elementPath, const char *data, int dataLen, void *callbackData) { ListMultipartData *lmData = (ListMultipartData *) callbackData; int fit; if (data) { if (!strcmp(elementPath, "ListMultipartUploadsResult/IsTruncated")) { string_buffer_append(lmData->isTruncated, data, dataLen, fit); } else if (!strcmp(elementPath, "ListMultipartUploadsResult/NextKeyMarker")) { string_buffer_append(lmData->nextKeyMarker, data, dataLen, fit); } else if (!strcmp(elementPath, "ListMultipartUploadsResult/NextUploadIdMarker")) { string_buffer_append(lmData->nextUploadIdMarker, data, dataLen, fit); } else if (!strcmp(elementPath, "ListMultipartUploadsResult/Upload/Key")) { ListMultipartUpload *uploads = &(lmData->uploads[lmData->uploadsCount]); string_buffer_append(uploads->key, data, dataLen, fit); } else if (!strcmp(elementPath, "ListMultipartUploadsResult/Upload/Initiated")) { ListMultipartUpload *uploads = &(lmData->uploads[lmData->uploadsCount]); string_buffer_append(uploads->initiated, data, dataLen, fit); } else if (!strcmp(elementPath, "ListMultipartUploadsResult/Upload/UploadId")) { ListMultipartUpload *uploads = &(lmData->uploads[lmData->uploadsCount]); string_buffer_append(uploads->uploadId, data, dataLen, fit); } else if (!strcmp(elementPath, "ListMultipartUploadsResult/Upload/Initiator/ID")) { ListMultipartUpload *uploads = &(lmData->uploads[lmData->uploadsCount]); string_buffer_append(uploads->initiatorId, data, dataLen, fit); } else if (!strcmp (elementPath, "ListMultipartUploadsResult/Upload/Initiator/DisplayName")) { ListMultipartUpload *uploads = &(lmData->uploads[lmData->uploadsCount]); string_buffer_append(uploads->initiatorDisplayName, data, dataLen, fit); } else if (!strcmp(elementPath, "ListMultipartUploadsResult/Upload/Owner/ID")) { ListMultipartUpload *uploads = &(lmData->uploads[lmData->uploadsCount]); string_buffer_append(uploads->ownerId, data, dataLen, fit); } else if (!strcmp (elementPath, 
"ListMultipartUploadsResult/Upload/Owner/DisplayName")) { ListMultipartUpload *uploads = &(lmData->uploads[lmData->uploadsCount]); string_buffer_append (uploads->ownerDisplayName, data, dataLen, fit); } else if (!strcmp(elementPath, "ListMultipartUploadsResult/Upload/StorageClass")) { ListMultipartUpload *uploads = &(lmData->uploads[lmData->uploadsCount]); string_buffer_append(uploads->storageClass, data, dataLen, fit); } else if (!strcmp(elementPath, "ListMultipartUploadsResult/CommonPrefixes/Prefix")) { int which = lmData->commonPrefixesCount; lmData->commonPrefixLens[which] += snprintf(lmData->commonPrefixes[which], sizeof(lmData->commonPrefixes[which]) - lmData->commonPrefixLens[which] - 1, "%.*s", dataLen, data); if (lmData->commonPrefixLens[which] >= (int) sizeof(lmData->commonPrefixes[which])) { return S3StatusXmlParseFailure; } } } else { if (!strcmp(elementPath, "ListMultipartUploadsResult/Upload")) { // Finished a Contents lmData->uploadsCount++; if (lmData->uploadsCount == MAX_UPLOADS) { // Make the callback S3Status status = make_list_multipart_callback(lmData); if (status != S3StatusOK) { return status; } initialize_list_multipart_data(lmData); } else { // Initialize the next one initialize_list_multipart_upload (&(lmData->uploads[lmData->uploadsCount])); } } else if (!strcmp(elementPath, "ListMultipartUploadsResult/CommonPrefixes/Prefix")) { // Finished a Prefix lmData->commonPrefixesCount++; if (lmData->commonPrefixesCount == MAX_COMMON_PREFIXES) { // Make the callback S3Status status = make_list_multipart_callback(lmData); if (status != S3StatusOK) { return status; } initialize_list_multipart_data(lmData); } else { // Initialize the next one lmData->commonPrefixes[lmData->commonPrefixesCount][0] = 0; lmData->commonPrefixLens[lmData->commonPrefixesCount] = 0; } } } /* Avoid compiler error about variable set but not used */ (void) fit; return S3StatusOK; }