/*
 * pgbloomfun_add - add a key to a scalable bloom filter stored in a bytea.
 *
 * arg0: bytea holding a pgbloom_t header followed by one or more bloom_t
 *       filters; arg1: the text key to insert.
 *
 * Walks every filter in the object: if the key is already present in an
 * older (full) filter, the object is returned unchanged.  If the newest
 * filter still has room, the key is added there in place.  Otherwise a new,
 * larger filter is appended (the object is reallocated) and the key goes
 * into it.  Returns the (possibly reallocated) bloom object.
 */
Datum pgbloomfun_add(PG_FUNCTION_ARGS)
{
    bytea *newbloomba, *bloomba = PG_GETARG_BYTEA_P(0);
    text *key = PG_GETARG_TEXT_P(1);
    pgbloom_t *pgbloom = get_pgbloom(bloomba);
    bloom_t newbloom, *bloom = NULL;
    size_t newbloom_size;
    int space_left, i;

    /*
     * The newest filter accepts more keys while it is below capacity;
     * growth_factor == 0 means "never grow", so it always has room.
     */
    space_left = (pgbloom->last_capacity > pgbloom->last_entries) || (pgbloom->growth_factor == 0);
    for (i=0; i<pgbloom->filters; i++)
    {
        /* next_bloom() walks the filters laid out back-to-back in the bytea */
        bloom = next_bloom(bloomba, bloom);
        if (bloom == NULL)
        {
            elog(ERROR, "pgbloomfun: missing filter in bloom object");
        }
        if (i == pgbloom->filters - 1 && space_left)
        {
            /* newest filter with room: insert; bloom_add returns 0 for a new key */
            if (bloom_add(bloom, VARDATA(key), VARSIZE(key) - VARHDRSZ) == 0)
            {
                pgbloom->total_entries ++;
                pgbloom->last_entries ++;
            }
            PG_RETURN_BYTEA_P(bloomba);
        }
        else if (bloom_check(bloom, VARDATA(key), VARSIZE(key) - VARHDRSZ))
        {
            PG_RETURN_BYTEA_P(bloomba); /* key already exists */
        }
    }

    /* create a new filter */
    pgbloom->filters += 1;
    pgbloom->total_entries += 1;
    pgbloom->last_entries = 1;
    pgbloom->last_capacity *= pgbloom->growth_factor;
    pgbloom->total_capacity += pgbloom->last_capacity;

    /* calculate and allocate space */
    bloom_init(&newbloom, pgbloom->last_capacity, pgbloom->error_rate);
    newbloom_size = sizeof(newbloom) + newbloom.bits / 8;
    newbloomba = palloc(VARSIZE(bloomba) + newbloom_size);
    memcpy(newbloomba, bloomba, VARSIZE(bloomba));
    SET_VARSIZE(newbloomba, VARSIZE(bloomba) + newbloom_size);

    /* initialize the new bloom filter and add the new key to it */
    bloom = (bloom_t *) (((unsigned char *) newbloomba) + VARSIZE(bloomba));
    memset(bloom, 0, newbloom_size);
    memcpy(bloom, &newbloom, sizeof(newbloom));
    bloom_add(bloom, VARDATA(key), VARSIZE(key) - VARHDRSZ);
    PG_RETURN_BYTEA_P(newbloomba);
}
Datum gistoptions(PG_FUNCTION_ARGS) { Datum reloptions = PG_GETARG_DATUM(0); bool validate = PG_GETARG_BOOL(1); relopt_value *options; GiSTOptions *rdopts; int numoptions; static const relopt_parse_elt tab[] = { {"fillfactor", RELOPT_TYPE_INT, offsetof(GiSTOptions, fillfactor)}, {"buffering", RELOPT_TYPE_STRING, offsetof(GiSTOptions, bufferingModeOffset)} }; options = parseRelOptions(reloptions, validate, RELOPT_KIND_GIST, &numoptions); /* if none set, we're done */ if (numoptions == 0) PG_RETURN_NULL(); rdopts = allocateReloptStruct(sizeof(GiSTOptions), options, numoptions); fillRelOptions((void *) rdopts, sizeof(GiSTOptions), options, numoptions, validate, tab, lengthof(tab)); pfree(options); PG_RETURN_BYTEA_P(rdopts); }
Datum ginoptions(PG_FUNCTION_ARGS) { Datum reloptions = PG_GETARG_DATUM(0); bool validate = PG_GETARG_BOOL(1); relopt_value *options; GinOptions *rdopts; int numoptions; static const relopt_parse_elt tab[] = { {"fastupdate", RELOPT_TYPE_BOOL, offsetof(GinOptions, useFastUpdate)} }; options = parseRelOptions(reloptions, validate, RELOPT_KIND_GIN, &numoptions); /* if none set, we're done */ if (numoptions == 0) PG_RETURN_NULL(); rdopts = allocateReloptStruct(sizeof(GinOptions), options, numoptions); fillRelOptions((void *) rdopts, sizeof(GinOptions), options, numoptions, validate, tab, lengthof(tab)); pfree(options); PG_RETURN_BYTEA_P(rdopts); }
Datum sha_to_bytea_fn(PG_FUNCTION_ARGS) { Sha *value = PG_GETARG_SHA(0); PG_RETURN_BYTEA_P(hexarr_to_bytea(value->bytes, SHA_LENGTH)); }
/*
 * compactmolecule - convert a molecule to Bingo's compact (ICM) binary form.
 *
 * arg0: the molecule value; arg1: options_xyz flag passed through to
 * mangoICM (presumably controls whether XYZ coordinates are kept —
 * TODO confirm against the mangoICM API).
 *
 * Runs inside PG_BINGO_BEGIN/PG_BINGO_END, which wrap the Bingo core
 * calls (C++ exception handling cannot be mixed freely with longjmp-based
 * PostgreSQL error handling).  On conversion failure a warning is raised
 * and NULL is returned.
 */
Datum compactmolecule(PG_FUNCTION_ARGS){
    Datum mol_datum = PG_GETARG_DATUM(0);
    bool options_xyz = PG_GETARG_BOOL(1);
    void* result = 0;
    PG_BINGO_BEGIN
    {
        /* bind a Bingo session to this function's OID for the duration */
        BingoPgCommon::BingoSessionHandler bingo_handler(fcinfo->flinfo->fn_oid);
        bingo_handler.setFunctionName("compactmolecule");

        BingoPgText mol_text(mol_datum);
        int buf_size;
        const char* mol_buf = mol_text.getText(buf_size);
        int res_buf;
        /* core call: returns 0 on failure, ICM buffer + length otherwise */
        const char* bingo_result = mangoICM(mol_buf, buf_size, options_xyz, &res_buf);
        if(bingo_result == 0) {
            CORE_HANDLE_WARNING(0, 1, "bingo.compactmolecule", bingoGetError());
            PG_RETURN_NULL();
        }
        BingoPgText result_data;
        result_data.initFromBuffer(bingo_result, res_buf);
        /* release() hands ownership of the varlena out of the RAII wrapper */
        result = result_data.release();
    }
    PG_BINGO_END
    /* result stays 0 if the guarded section bailed out without producing data */
    if(result == 0)
        PG_RETURN_NULL();
    PG_RETURN_BYTEA_P(result);
}
Datum geography_as_binary(PG_FUNCTION_ARGS) { LWGEOM *lwgeom = NULL; uint8_t *wkb = NULL; bytea *wkb_result; size_t wkb_size = 0; GSERIALIZED *g = (GSERIALIZED*)PG_DETOAST_DATUM(PG_GETARG_DATUM(0)); /* Get our lwgeom form */ lwgeom = lwgeom_from_gserialized(g); if ( gserialized_ndims(g) > 2 ) { /* Strip out the higher dimensions */ LWGEOM *tmp = lwgeom_force_2d(lwgeom); lwgeom_free(lwgeom); lwgeom = tmp; } /* Create WKB */ wkb = lwgeom_to_wkb(lwgeom, WKB_SFSQL, &wkb_size); /* Copy to varlena pointer */ wkb_result = palloc(wkb_size + VARHDRSZ); SET_VARSIZE(wkb_result, wkb_size + VARHDRSZ); memcpy(VARDATA(wkb_result), wkb, wkb_size); /* Clean up */ pfree(wkb); lwgeom_free(lwgeom); PG_RETURN_BYTEA_P(wkb_result); }
Datum binary_decode(PG_FUNCTION_ARGS) { text *data = PG_GETARG_TEXT_P(0); Datum name = PG_GETARG_DATUM(1); bytea *result; char *namebuf; int datalen, resultlen, res; const struct pg_encoding *enc; datalen = VARSIZE(data) - VARHDRSZ; namebuf = TextDatumGetCString(name); enc = pg_find_encoding(namebuf); if (enc == NULL) ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), errmsg("unrecognized encoding: \"%s\"", namebuf))); resultlen = enc->decode_len(VARDATA(data), datalen); result = palloc(VARHDRSZ + resultlen); res = enc->decode(VARDATA(data), datalen, VARDATA(result)); /* Make this FATAL 'cause we've trodden on memory ... */ if (res > resultlen) elog(FATAL, "overflow - decode estimate too small"); SET_VARSIZE(result, VARHDRSZ + res); PG_RETURN_BYTEA_P(result); }
Datum hstore_send(PG_FUNCTION_ARGS) { HStore *in = PG_GETARG_HS(0); int i; int count = HS_COUNT(in); char *base = STRPTR(in); HEntry *entries = ARRPTR(in); StringInfoData buf; pq_begintypsend(&buf); pq_sendint(&buf, count, 4); for (i = 0; i < count; i++) { int32 keylen = HS_KEYLEN(entries, i); pq_sendint(&buf, keylen, 4); pq_sendtext(&buf, HS_KEY(entries, base, i), keylen); if (HS_VALISNULL(entries, i)) { pq_sendint(&buf, -1, 4); } else { int32 vallen = HS_VALLEN(entries, i); pq_sendint(&buf, vallen, 4); pq_sendtext(&buf, HS_VAL(entries, base, i), vallen); } } PG_RETURN_BYTEA_P(pq_endtypsend(&buf)); }
/*
 * ora_nlssort - produce a locale-aware sort key for a string,
 * mimicking Oracle's NLSSORT.
 *
 * arg0: the string (NULL yields NULL); arg1: the locale name.  When the
 * locale argument is NULL, the session default locale is used if one was
 * set, otherwise an empty locale string.
 */
Datum ora_nlssort(PG_FUNCTION_ARGS)
{
    text   *locale;
    text   *sortkey;

    if (PG_ARGISNULL(0))
        PG_RETURN_NULL();

    if (!PG_ARGISNULL(1))
        locale = PG_GETARG_TEXT_PP(1);
    else if (def_locale != NULL)
        locale = def_locale;
    else
    {
        /* no locale given and no default: use an empty text value */
        locale = palloc(VARHDRSZ);
        SET_VARSIZE(locale, VARHDRSZ);
    }

    sortkey = _nls_run_strxfrm(PG_GETARG_TEXT_PP(0), locale);

    if (!sortkey)
        PG_RETURN_NULL();

    PG_RETURN_BYTEA_P(sortkey);
}
Datum pg_digest(PG_FUNCTION_ARGS) { bytea *arg; text *name; unsigned len, hlen; PX_MD *md; bytea *res; name = PG_GETARG_TEXT_P(1); /* will give error if fails */ md = find_provider(name, (PFN) px_find_digest, "Digest", 0); hlen = px_md_result_size(md); res = (text *) palloc(hlen + VARHDRSZ); SET_VARSIZE(res, hlen + VARHDRSZ); arg = PG_GETARG_BYTEA_P(0); len = VARSIZE(arg) - VARHDRSZ; px_md_update(md, (uint8 *) VARDATA(arg), len); px_md_finish(md, (uint8 *) VARDATA(res)); px_md_free(md); PG_FREE_IF_COPY(arg, 0); PG_FREE_IF_COPY(name, 1); PG_RETURN_BYTEA_P(res); }
Datum pgbloomfun_init(PG_FUNCTION_ARGS) { int capacity = PG_GETARG_INT32(0); int growth_factor = PG_GETARG_INT32(1); double error_rate = PG_GETARG_FLOAT8(2); pgbloom_t pgbloom; size_t bloom_size; bytea *res; if (capacity <= 0) elog(ERROR, "pgbloomfun: bloom filter capacity must be positive"); if (growth_factor < 0 || growth_factor > 1000) elog(ERROR, "pgbloomfun: growth factor must be between 0 and 1000"); if (error_rate <= 0.0 || error_rate >= 1.0) elog(ERROR, "pgbloomfun: error rate must be higher than 0.0 and lower than 1.0"); pgbloom.version = PGBLOOM_VERSION; pgbloom.total_entries = pgbloom.last_entries = 0; pgbloom.total_capacity = pgbloom.last_capacity = capacity; pgbloom.growth_factor = growth_factor; pgbloom.error_rate = error_rate; pgbloom.filters = 1; bloom_init(&pgbloom.bloom, capacity, error_rate); bloom_size = sizeof(pgbloom) + pgbloom.bloom.bits / 8; res = palloc(VARHDRSZ + bloom_size); SET_VARSIZE(res, VARHDRSZ + bloom_size); memset(VARDATA(res), 0, bloom_size); memcpy(VARDATA(res), &pgbloom, sizeof(pgbloom)); PG_RETURN_BYTEA_P(res); }
/* * Read a section of a file, returning it as bytea */ Datum pg_read_binary_file(PG_FUNCTION_ARGS) { text *filename_t = PG_GETARG_TEXT_PP(0); int64 seek_offset = 0; int64 bytes_to_read = -1; bool missing_ok = false; char *filename; bytea *result; /* handle optional arguments */ if (PG_NARGS() >= 3) { seek_offset = PG_GETARG_INT64(1); bytes_to_read = PG_GETARG_INT64(2); if (bytes_to_read < 0) ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), errmsg("requested length cannot be negative"))); } if (PG_NARGS() >= 4) missing_ok = PG_GETARG_BOOL(3); filename = convert_and_check_filename(filename_t); result = read_binary_file(filename, seek_offset, bytes_to_read, missing_ok); if (result) PG_RETURN_BYTEA_P(result); else PG_RETURN_NULL(); }
Datum bloptions(PG_FUNCTION_ARGS) { Datum reloptions = PG_GETARG_DATUM(0); bool validate = PG_GETARG_BOOL(1); relopt_value *options; int numoptions; BloomOptions *rdopts; relopt_parse_elt tab[INDEX_MAX_KEYS+1]; int i; char buf[16]; tab[0].optname = "length"; tab[0].opttype = RELOPT_TYPE_INT; tab[0].offset = offsetof(BloomOptions, bloomLength); for(i=0;i<INDEX_MAX_KEYS;i++) { snprintf(buf, sizeof(buf), "col%d", i+1); tab[i+1].optname = pstrdup(buf); tab[i+1].opttype = RELOPT_TYPE_INT; tab[i+1].offset = offsetof(BloomOptions, bitSize[i]); } options = parseRelOptions(reloptions, validate, bloom_kind, &numoptions); rdopts = allocateReloptStruct(sizeof(BloomOptions), options, numoptions); fillRelOptions((void *) rdopts, sizeof(BloomOptions), options, numoptions, validate, tab, INDEX_MAX_KEYS+1); rdopts = makeDefaultBloomOptions(rdopts); PG_RETURN_BYTEA_P(rdopts); }
Datum x509_get_serial_number(PG_FUNCTION_ARGS) { bytea *raw; bytea *result; BIGNUM *bn; X509 *cert; // check for null value. raw = PG_GETARG_BYTEA_P(0); if (raw == NULL || VARSIZE(raw) == VARHDRSZ) { PG_RETURN_NULL(); } cert = x509_from_bytea(raw); if (cert == NULL) { ereport(ERROR, (errcode(ERRCODE_DATA_CORRUPTED), errmsg( "unable to decode X509 record"))); } bn = ASN1_INTEGER_to_BN(X509_get_serialNumber(cert), NULL); result = bn_to_bytea(bn); BN_free(bn); X509_free(cert); PG_RETURN_BYTEA_P(result); }
Datum x509_in(PG_FUNCTION_ARGS) { char *txt; bytea *result; X509 *x509; // check for null input txt = PG_GETARG_CSTRING(0); if (txt == NULL || strlen(txt) == 0) { PG_RETURN_NULL(); } // write X509 cert into buffer x509 = x509_from_string(txt); if (x509 == NULL) { ereport(ERROR, (errcode(ERRCODE_DATA_CORRUPTED), errmsg( "unable to decode X509 record"))); PG_RETURN_NULL(); } result = x509_to_bytea(x509); X509_free(x509); // return bytea PG_RETURN_BYTEA_P(result); }
Datum loread(PG_FUNCTION_ARGS) { int32 fd = PG_GETARG_INT32(0); int32 len = PG_GETARG_INT32(1); bytea *retval; int totalread; #ifdef PGXC #ifdef XCP ereport(ERROR, (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), errmsg("Postgres-XL does not yet support large objects"), errdetail("The feature is not currently supported"))); #else ereport(ERROR, (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), errmsg("Postgres-XC does not support large object yet"), errdetail("The feature is not currently supported"))); #endif #endif if (len < 0) len = 0; retval = (bytea *) palloc(VARHDRSZ + len); totalread = lo_read(fd, VARDATA(retval), len); SET_VARSIZE(retval, totalread + VARHDRSZ); PG_RETURN_BYTEA_P(retval); }
Datum enum_send(PG_FUNCTION_ARGS) { Oid enumval = PG_GETARG_OID(0); StringInfoData buf; HeapTuple tup; Form_pg_enum en; tup = SearchSysCache(ENUMOID, ObjectIdGetDatum(enumval), 0, 0, 0); if (!HeapTupleIsValid(tup)) ereport(ERROR, (errcode(ERRCODE_INVALID_BINARY_REPRESENTATION), errmsg("invalid internal value for enum: %u", enumval))); en = (Form_pg_enum) GETSTRUCT(tup); pq_begintypsend(&buf); pq_sendtext(&buf, NameStr(en->enumlabel), strlen(NameStr(en->enumlabel))); ReleaseSysCache(tup); PG_RETURN_BYTEA_P(pq_endtypsend(&buf)); }
Datum adaptive_counter_update(PG_FUNCTION_ARGS) { bytea * data; AdaptiveCounter ac; /* is the counter created (if not, create it - error 1%, 10mil items) */ if (PG_ARGISNULL(0)) { ac = as_create(0.01, 4); data = (bytea*)palloc(ac->length + VARHDRSZ); SET_VARSIZE(data, ac->length); memcpy(VARDATA(data), ac, ac->length); elog(NOTICE, "Adaptive Counter: %d bytes", ac->length); } else { data = PG_GETARG_BYTEA_P(0); } ac = (AdaptiveCounter)VARDATA(data); /* get the new item */ text * item = PG_GETARG_TEXT_P(1); /* in-place update works only if executed as aggregate */ as_add_element(ac, VARDATA(item), VARSIZE(item) - VARHDRSZ); /* return the updated bytea */ PG_RETURN_BYTEA_P(data); }
Datum ipaddr_send(PG_FUNCTION_ARGS) { IP_P arg1 = PG_GETARG_IP_P(0); StringInfoData buf; IP ip; int af = ip_unpack(arg1, &ip); pq_begintypsend(&buf); pq_sendbyte(&buf, af); pq_sendbyte(&buf, ip_maxbits(af)); pq_sendbyte(&buf, 1); pq_sendbyte(&buf, ip_sizeof(af)); switch (af) { case PGSQL_AF_INET: pq_sendint(&buf, ip.ip4, sizeof(IP4)); break; case PGSQL_AF_INET6: pq_sendint64(&buf, ip.ip6.bits[0]); pq_sendint64(&buf, ip.ip6.bits[1]); break; } PG_RETURN_BYTEA_P(pq_endtypsend(&buf)); }
/*
 * nseq_send - binary output for NSEQ: the internal representation is
 * already a varlena, so it is sent verbatim.
 */
Datum nseq_send (PG_FUNCTION_ARGS)
{
    NSEQ   *seq = PG_GETARG_NSEQ_P (0);

    PG_RETURN_BYTEA_P(seq);
}
Datum probabilistic_merge_agg(PG_FUNCTION_ARGS) { ProbabilisticCounter counter1; ProbabilisticCounter counter2 = (ProbabilisticCounter)PG_GETARG_BYTEA_P(1); /* is the counter created (if not, create it - error 1%, 10mil items) */ if (PG_ARGISNULL(0)) { /* just copy the second estimator into the first one */ counter1 = pc_copy(counter2); } else { /* ok, we already have the estimator - merge the second one into it */ counter1 = (ProbabilisticCounter)PG_GETARG_BYTEA_P(0); /* perform the merge (in place) */ counter1 = pc_merge(counter1, counter2, true); } /* return the updated bytea */ PG_RETURN_BYTEA_P(counter1); }
/*
 * hll_merge - merge one HyperLogLog sketch (arg1) into the running
 * state sketch (arg0), in place.
 *
 * Layout assumption (from the indexing below): the bytea payload is an
 * array of uint32, with word [0] holding log2 of the register count and
 * the registers starting at word [2] — word [1]'s meaning is not visible
 * here; TODO confirm against the sketch definition.
 *
 * NOTE(review): when the state is NULL this returns the second argument
 * directly rather than a copy; if used as an aggregate transition
 * function that presumably relies on the caller's memory-context
 * behavior — verify it is safe for this usage.
 */
Datum hll_merge(PG_FUNCTION_ARGS)
{
    bytea *state;
    uint32_t *sdata;
    bytea *value = PG_GETARG_BYTEA_P(1);
    uint32_t *vdata = (uint32_t *) VARDATA(value);

    if ( PG_ARGISNULL(0) )
    {
        /* no state yet: adopt the incoming sketch as-is */
        PG_RETURN_BYTEA_P(value);
    }
    else
    {
        state = PG_GETARG_BYTEA_P(0);
        sdata = (uint32_t *) VARDATA(state);
    }

    /* 1 << sdata[0] registers; register arrays start at word offset 2 */
    merge_sets(1 << sdata[0], vdata + 2, sdata + 2);

    PG_RETURN_BYTEA_P(state);
}
/*
 * return_bson - wrap a BSON object's raw bytes in a freshly palloc'd
 * bytea and return it as a Datum.
 */
Datum return_bson(const mongo::BSONObj& b)
{
    const std::size_t total_size = b.objsize() + VARHDRSZ;

    bytea* out = static_cast<bytea*>(palloc(total_size));
    SET_VARSIZE(out, total_size);
    std::memcpy(VARDATA(out), b.objdata(), b.objsize());

    PG_RETURN_BYTEA_P(out);
}
/* * like_escape_bytea() --- given a pattern and an ESCAPE string, * convert the pattern to use Postgres' standard backslash escape convention. */ Datum like_escape_bytea(PG_FUNCTION_ARGS) { bytea *pat = PG_GETARG_BYTEA_PP(0); bytea *esc = PG_GETARG_BYTEA_PP(1); bytea *result = SB_do_like_escape((text *) pat, (text *) esc); PG_RETURN_BYTEA_P((bytea *) result); }
/*
 * void_send - binary output routine for pseudo-type VOID.
 *
 * We allow this so that "SELECT function_returning_void(...)" works
 * even when binary output is requested.
 */
Datum void_send(PG_FUNCTION_ARGS)
{
    StringInfoData sendbuf;

    /* send an empty string */
    pq_begintypsend(&sendbuf);
    PG_RETURN_BYTEA_P(pq_endtypsend(&sendbuf));
}
/*
 * xidsend - converts xid to binary format
 */
Datum xidsend(PG_FUNCTION_ARGS)
{
    TransactionId xid = PG_GETARG_TRANSACTIONID(0);
    StringInfoData sendbuf;

    pq_begintypsend(&sendbuf);
    pq_sendint(&sendbuf, xid, sizeof(xid));
    PG_RETURN_BYTEA_P(pq_endtypsend(&sendbuf));
}
/*
 * cidsend - converts cid to binary format
 */
Datum cidsend(PG_FUNCTION_ARGS)
{
    CommandId cid = PG_GETARG_COMMANDID(0);
    StringInfoData sendbuf;

    pq_begintypsend(&sendbuf);
    pq_sendint(&sendbuf, cid, sizeof(cid));
    PG_RETURN_BYTEA_P(pq_endtypsend(&sendbuf));
}
/*
 * charsend - converts char to binary format
 */
Datum charsend(PG_FUNCTION_ARGS)
{
    char        ch = PG_GETARG_CHAR(0);
    StringInfoData sendbuf;

    pq_begintypsend(&sendbuf);
    pq_sendbyte(&sendbuf, ch);
    PG_RETURN_BYTEA_P(pq_endtypsend(&sendbuf));
}
/*
 * int2send - converts int2 to binary format
 */
Datum int2send(PG_FUNCTION_ARGS)
{
    int16       val = PG_GETARG_INT16(0);
    StringInfoData sendbuf;

    pq_begintypsend(&sendbuf);
    pq_sendint(&sendbuf, val, sizeof(int16));
    PG_RETURN_BYTEA_P(pq_endtypsend(&sendbuf));
}
/*
 * int8send - converts int8 to binary format
 */
Datum int8send(PG_FUNCTION_ARGS)
{
    int64       val = PG_GETARG_INT64(0);
    StringInfoData sendbuf;

    pq_begintypsend(&sendbuf);
    pq_sendint64(&sendbuf, val);
    PG_RETURN_BYTEA_P(pq_endtypsend(&sendbuf));
}