
Commit

taking new commits to 5x
Signed-off-by: pranav jain <[email protected]>
pranavJ23 committed Jan 6, 2025
1 parent fbbb062 commit ab10aba
Showing 105 changed files with 464 additions and 11,748 deletions.
@@ -12,7 +12,7 @@ runs:
FROM pg_proc
WHERE proname = 'check_for_inconsistent_metadata';" | grep -o 'exists\|not_exists')
echo "function_exists=$function_exists" >> $GITHUB_OUTPUT
echo "::set-output name=function_exists::$function_exists"
echo "Check Babelfish metadata inconsistency function exists: $function_exists"
# If the function exists, run the metadata inconsistency check
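For reference, the hunk above (and the matching hunks in pg_dump-restore-test.yml and upgrade-test.yml below) swaps GitHub Actions' environment-file output syntax for the older ::set-output workflow command. A minimal sketch of the two forms, using a hypothetical output named my_output:

```
# Newer form: append "name=value" to the file whose path is in $GITHUB_OUTPUT.
echo "my_output=$my_value" >> "$GITHUB_OUTPUT"

# Older form restored by this commit: emit a ::set-output workflow command on stdout.
# GitHub Actions has deprecated this form in favor of the one above.
echo "::set-output name=my_output::$my_value"
```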
2 changes: 1 addition & 1 deletion .github/composite-actions/install-dependencies/action.yml
@@ -9,7 +9,7 @@ runs:
curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
curl https://packages.microsoft.com/config/ubuntu/20.04/prod.list | sudo tee /etc/apt/sources.list.d/msprod.list
sudo apt-get update --fix-missing -y
sudo apt-get install uuid-dev openjdk-21-jre libicu-dev libxml2-dev openssl libssl-dev python3-dev libossp-uuid-dev libpq-dev cmake pkg-config g++ build-essential bison mssql-tools unixodbc-dev libsybdb5 freetds-dev freetds-common gdal-bin libgdal-dev libgeos-dev gdb libkrb5-dev
sudo apt-get install uuid-dev openjdk-8-jre libicu-dev libxml2-dev openssl libssl-dev python3-dev libossp-uuid-dev libpq-dev cmake pkg-config g++ build-essential bison mssql-tools unixodbc-dev libsybdb5 freetds-dev freetds-common gdal-bin libgdal-dev libgeos-dev gdb libkrb5-dev
sudo apt install -y ccache
sudo apt-get install lcov
sudo /usr/sbin/update-ccache-symlinks
2 changes: 1 addition & 1 deletion .github/workflows/code-coverage.yml
@@ -147,7 +147,7 @@ jobs:

- name: Download CSV report from previous run
if: (github.event_name == 'schedule')
uses: dawidd6/action-download-artifact@v6
uses: dawidd6/action-download-artifact@v2
with:
name: csv_${{github.ref_name}}
path: contrib/
20 changes: 10 additions & 10 deletions .github/workflows/pg_dump-restore-test.yml
@@ -23,7 +23,7 @@ jobs:
dump_format: itm[itm.length - 1]['dump-format'], \
type: itm[itm.length - 1]['type']})); \
console.log(JSON.stringify(p));")
echo "dump-restore-path-list=$DUMP_RESTORE_PATH_LIST" >> $GITHUB_OUTPUT
echo "::set-output name=dump-restore-path-list::$DUMP_RESTORE_PATH_LIST"
run-dump-restore-test:
needs: generate-dump-restore-tests
@@ -45,24 +45,24 @@
id: read-base-and-final-version
if: always() && steps.install-yq.outcome == 'success'
run: >
echo "base-version=$(
echo "::set-output name=base-version::$(
yq '."dump-restore-version"[${{ matrix.upgrade-path.id }}][0].version' ${{ github.workspace }}/.github/configuration/dump-restore-test-configuration.yml
)" >> $GITHUB_OUTPUT &&
echo "final-version=$(
)" &&
echo "::set-output name=final-version::$(
yq '."dump-restore-version"[${{ matrix.upgrade-path.id }}][-1].version' ${{ github.workspace }}/.github/configuration/dump-restore-test-configuration.yml
)" >> $GITHUB_OUTPUT
)"
- name: Find Engine and Extension Branches for Base Version ${{ steps.read-base-and-final-version.outputs.base-version }}
id: find-branch
if: always() && steps.read-base-and-final-version.outcome == 'success'
run: >
echo "base-engine-branch=$(
echo "::set-output name=base-engine-branch::$(
yq '."${{ steps.read-base-and-final-version.outputs.base-version }}".engine_branch' ${{ github.workspace }}/.github/template/version-branch-template.yml
)" >> $GITHUB_OUTPUT &&
echo "base-extension-branch=$(
)" &&
echo "::set-output name=base-extension-branch::$(
yq '."${{ steps.read-base-and-final-version.outputs.base-version }}".extension_branch' ${{ github.workspace }}/.github/template/version-branch-template.yml
)" >> $GITHUB_OUTPUT &&
echo "base-dir=$(echo psql$(awk -F. '{print $1}' <<< ${{ steps.read-base-and-final-version.outputs.base-version }}))" >> $GITHUB_OUTPUT
)" &&
echo "::set-output name=base-dir::$(echo psql$(awk -F. '{print $1}' <<< ${{ steps.read-base-and-final-version.outputs.base-version }}))"
- name: Setup Base Version ${{ steps.read-base-and-final-version.outputs.base-version }} and Run Preparation Tests
id: setup-base-version
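As a side note on the find-branch step above: base-dir is derived by keeping only the major component of the base version and prefixing it with psql. A minimal sketch of that derivation, assuming a hypothetical base version of 4.2.0:

```
# Hypothetical base version, standing in for the value read from the configuration YAML.
base_version="4.2.0"

# awk -F. splits on dots; $1 is the major component.
major=$(awk -F. '{print $1}' <<< "$base_version")   # -> 4

echo "psql${major}"                                  # -> psql4
```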
20 changes: 10 additions & 10 deletions .github/workflows/upgrade-test.yml
@@ -16,7 +16,7 @@ jobs:
config="'$(yq -o=json ${{ github.workspace }}/.github/configuration/upgrade-test-configuration.yml)'"
config=$(echo $config | sed "s/\"/\\\\\"/g")
UPGRADE_PATH_LIST=$(node -e "let k = JSON.parse($config); let p = k['upgrade-version'].map((itm, index) => ({ id: index, path: itm['upgrade-path'].map(i => i.version.toString().replace(/[.]/g, \"_\")), server_collation_name: itm['babelfishpg_tsql_server_collation_name'] ? itm['babelfishpg_tsql_server_collation_name'] : \"default\", title: itm['upgrade-path'].map(i => i.version.toString().replace(/[.]/g, \"_\")).join(\"-\") + (itm['babelfishpg_tsql_server_collation_name'] ? (\" with server-collation - \" + itm['babelfishpg_tsql_server_collation_name'] + \" \") : \"\"), last_version: itm['upgrade-path'][itm['upgrade-path'].length - 1].version.toString().replace(/[.]/g, \"_\") })); console.log(JSON.stringify(p));")
echo "upgrade-path-list=$UPGRADE_PATH_LIST" >> $GITHUB_OUTPUT
echo "::set-output name=upgrade-path-list::$UPGRADE_PATH_LIST"
run-version-upgrade-test:
needs: generate-version-upgrade-tests
@@ -38,24 +38,24 @@
id: read-base-and-final-version
if: always() && steps.install-yq.outcome == 'success'
run: >
echo "base-version=$(
echo "::set-output name=base-version::$(
yq '."upgrade-version"[${{ matrix.upgrade-path.id }}]."upgrade-path"[0].version' ${{ github.workspace }}/.github/configuration/upgrade-test-configuration.yml
)" >> $GITHUB_OUTPUT &&
echo "final-version=$(
)" &&
echo "::set-output name=final-version::$(
yq '."upgrade-version"[${{ matrix.upgrade-path.id }}]."upgrade-path"[-1].version' ${{ github.workspace }}/.github/configuration/upgrade-test-configuration.yml
)" >> $GITHUB_OUTPUT
)"
- name: Find Engine and Extension Branches for Base Version ${{ steps.read-base-and-final-version.outputs.base-version }}
id: find-branch
if: always() && steps.read-base-and-final-version.outcome == 'success'
run: >
echo "base-engine-branch=$(
echo "::set-output name=base-engine-branch::$(
yq '."${{ steps.read-base-and-final-version.outputs.base-version }}".engine_branch' ${{ github.workspace }}/.github/template/version-branch-template.yml
)" >> $GITHUB_OUTPUT &&
echo "base-extension-branch=$(
)" &&
echo "::set-output name=base-extension-branch::$(
yq '."${{ steps.read-base-and-final-version.outputs.base-version }}".extension_branch' ${{ github.workspace }}/.github/template/version-branch-template.yml
)" >> $GITHUB_OUTPUT &&
echo "base-dir=$(echo psql$(awk -F. '{print $1}' <<< ${{ steps.read-base-and-final-version.outputs.base-version }}))" >> $GITHUB_OUTPUT
)" &&
echo "::set-output name=base-dir::$(echo psql$(awk -F. '{print $1}' <<< ${{ steps.read-base-and-final-version.outputs.base-version }}))"
- name: Setup Base Version ${{ steps.read-base-and-final-version.outputs.base-version }} and Run Preparation Tests
id: setup-base-version
2 changes: 1 addition & 1 deletion INSTALLING.md.tmpl
@@ -29,7 +29,7 @@ sudo apt-get update && sudo apt install -y --no-install-recommends \
cmake lld apt-utils libossp-uuid-dev gnulib bison \
xsltproc icu-devtools libicu66 \
libicu-dev gawk \
curl openjdk-21-jre openssl \
curl openjdk-8-jre openssl \
g++ libssl-dev python-dev libpq-dev \
pkg-config libutfcpp-dev \
gnupg unixodbc-dev net-tools unzip
2 changes: 1 addition & 1 deletion contrib/README.md
@@ -27,7 +27,7 @@ The following build instructions comply with Ubuntu 20.04 and Amazon Linux 2 env
You'll also need to install `gcc`, `gcc-c++`, `java` and `bison`.

```
sudo apt-get install uuid-dev openjdk-21-jre \
sudo apt-get install uuid-dev openjdk-8-jre \
libicu-dev libxml2-dev openssl libssl-dev python-dev \
libossp-uuid-dev libpq-dev pkg-config g++ build-essential bison
```
19 changes: 0 additions & 19 deletions contrib/babelfishpg_common/src/collation.c
@@ -1326,24 +1326,6 @@ has_ilike_node(Node *expr)
return false;
}

bool
has_like_node(Node *expr)
{
OpExpr *op;

Assert(IsA(expr, OpExpr));

op = (OpExpr *) expr;
for (int i = 0; i < TOTAL_LIKE_OP_COUNT; i++)
{
if (strcmp(get_opname(op->opno), like_ilike_table[i].like_op_name) == 0)
{
return true;
}
}
return false;
}

Datum
is_collated_ci_as_internal(PG_FUNCTION_ARGS)
{
@@ -1643,7 +1625,6 @@ get_collation_callbacks(void)
collation_callbacks_var.find_cs_as_collation_internal = &find_cs_as_collation;
collation_callbacks_var.find_collation_internal = &find_collation;
collation_callbacks_var.has_ilike_node = &has_ilike_node;
collation_callbacks_var.has_like_node = &has_like_node;
collation_callbacks_var.translate_bbf_collation_to_tsql_collation = &translate_bbf_collation_to_tsql_collation;
collation_callbacks_var.translate_tsql_collation_to_bbf_collation = &translate_tsql_collation_to_bbf_collation;
collation_callbacks_var.set_db_collation = &set_db_collation;
3 changes: 0 additions & 3 deletions contrib/babelfishpg_common/src/collation.h
@@ -113,8 +113,6 @@ typedef struct collation_callbacks

bool (*has_ilike_node) (Node *expr);

bool (*has_like_node) (Node *expr);

const char *(*translate_bbf_collation_to_tsql_collation) (const char *collname);

const char *(*translate_tsql_collation_to_bbf_collation) (const char *collname);
@@ -149,7 +147,6 @@ extern const char *translate_bbf_collation_to_tsql_collation(const char *collnam
extern const char *translate_tsql_collation_to_bbf_collation(const char *collname);
Oid get_oid_from_collidx(int collidx);
extern bool has_ilike_node(Node *expr);
extern bool has_like_node(Node *expr);
extern Oid babelfish_define_type_default_collation(Oid typeNamespace);
extern void set_db_collation(Oid db_coll);

3 changes: 1 addition & 2 deletions contrib/babelfishpg_tds/src/backend/tds/tds_srv.c
@@ -87,8 +87,7 @@ static ProtocolExtensionConfig pe_config = {
TdsShutdown,
TdsDestroy,
pe_process_command,
pe_report_param_status,
TdsDirectSSLHandshake
pe_report_param_status
};

/*
7 changes: 0 additions & 7 deletions contrib/babelfishpg_tds/src/backend/tds/tdsprinttup.c
@@ -122,10 +122,3 @@ TdsDestroy(DestReceiver *self)
{
pfree(self);
}

static int
TdsDirectSSLHandshake(struct Port *port)
{
/* Always return STATUS_OK for TDS connections */
return STATUS_OK;
}
2 changes: 0 additions & 2 deletions contrib/babelfishpg_tds/src/backend/tds/tdsresponse.c
@@ -1769,8 +1769,6 @@ PrepareRowDescription(TupleDesc typeinfo, PlannedStmt *plannedstmt, List *target
/*
* Get the IO function info from our type cache
*/
if (atttypmod == TSQLMaxTypmod)
atttypmod = -1;
finfo = TdsLookupTypeFunctionsByOid(atttypid, &atttypmod);
/* atttypid = getBaseTypeAndTypmod(atttypid, &atttypmod); */
#if 0
2 changes: 1 addition & 1 deletion contrib/babelfishpg_tds/src/backend/tds/tdsrpc.c
@@ -1587,7 +1587,7 @@ ReadParameters(TDSRequestSP request, uint64_t offset, StringInfo message, int *p
* Sets the col metadata and also the corresponding row
* data.
*/
SetColMetadataForTvp(temp, message, &offset, request->name.data);
SetColMetadataForTvp(temp, message, &offset);
}
break;
case TDS_TYPE_BINARY:
48 changes: 17 additions & 31 deletions contrib/babelfishpg_tds/src/include/tds_request.h
@@ -24,7 +24,6 @@
#include "src/include/tds_typeio.h"
#include "src/collation.h"


/* Different TDS request types returned by GetTDSRequest() */
typedef enum TDSRequestType
{
@@ -499,16 +498,16 @@ SetTvpRowData(ParameterToken temp, const StringInfo message, uint64_t *offset)
}

static inline void
SetColMetadataForTvp(ParameterToken temp, const StringInfo message, uint64_t *offset, char *proc_name)
SetColMetadataForTvp(ParameterToken temp, const StringInfo message, uint64_t *offset)
{
uint8_t len;
uint16 colCount;
uint16 isTvpNull;
char *tempString;
int i = 0;
char *messageData = message->data;
StringInfo tempStringInfo = palloc(sizeof(StringInfoData));
uint32_t collation;
uint8_t len;
uint16 colCount;
uint16 isTvpNull;
char *tempString;
int i = 0;
char *messageData = message->data;
StringInfo tempStringInfo = palloc(sizeof(StringInfoData));
uint32_t collation;

/* Database-Name.Schema-Name.TableType-Name */
for (; i < 3; i++)
@@ -537,34 +536,21 @@ SetColMetadataForTvp(ParameterToken temp, const StringInfo message, uint64_t *of
if (i == 1)
temp->tvpInfo->tvpTypeSchemaName = tempStringInfo->data;
else
{
temp->tvpInfo->tvpTypeName = tempStringInfo->data;
temp->tvpInfo->tableName = tempStringInfo->data;
}

}
else if (i == 2)
{
char *tvp_type_name;
char *tvp_type_schema_name;
/*
* Fetch the TVP typeName and schemaName from catalog search
* based on object name and argument name.
*/
pltsql_plugin_handler_ptr->get_tvp_typename_typeschemaname(proc_name,
temp->paramMeta.colName.data,
&tvp_type_name,
&tvp_type_schema_name);
temp->len += strlen(tvp_type_schema_name);
temp->tvpInfo->tvpTypeSchemaName = pstrdup(tvp_type_schema_name);

pfree(tvp_type_schema_name);

temp->len += strlen(tvp_type_name);
temp->tvpInfo->tvpTypeName = tvp_type_name;
temp->tvpInfo->tableName = tvp_type_name;
/* Throw error if TableType-Name is not provided */
ereport(ERROR,
(errcode(ERRCODE_PROTOCOL_VIOLATION),
errmsg("The incoming tabular data stream (TDS) remote procedure call (RPC) protocol stream is incorrect. "
"Table-valued parameter %d, to a parameterized string has no table type defined.",
temp->paramOrdinal + 1)));
}
}

temp->tvpInfo->tableName = tempStringInfo->data;
i = 0;

memcpy(&isTvpNull, &messageData[*offset], sizeof(uint16));
68 changes: 54 additions & 14 deletions contrib/babelfishpg_tsql/runtime/functions.c
@@ -2133,6 +2133,7 @@ object_id(PG_FUNCTION_ARGS)
char *physical_schema_name;
char *input;
char *object_type = NULL;
char **splited_object_name;
Oid schema_oid;
Oid user_id = GetUserId();
Oid result = InvalidOid;
@@ -2172,13 +2173,31 @@ object_id(PG_FUNCTION_ARGS)
(errcode(ERRCODE_STRING_DATA_LENGTH_MISMATCH),
errmsg("input value is too long for object name")));

/*
* Split the input string, downcase and truncate if needed
* and return the db_name, schema_name and object_name.
*/
downcase_truncate_split_object_name(input, NULL, &db_name, &schema_name, &object_name);
/* resolve the three part name */
splited_object_name = split_object_name(input);
db_name = splited_object_name[1];
schema_name = splited_object_name[2];
object_name = splited_object_name[3];

/* downcase identifier if needed */
if (pltsql_case_insensitive_identifiers)
{
db_name = downcase_identifier(db_name, strlen(db_name), false, false);
schema_name = downcase_identifier(schema_name, strlen(schema_name), false, false);
object_name = downcase_identifier(object_name, strlen(object_name), false, false);
for (int j = 0; j < 4; j++)
pfree(splited_object_name[j]);
}
else
pfree(splited_object_name[0]);

pfree(input);
pfree(splited_object_name);

/* truncate identifiers if needed */
truncate_tsql_identifier(db_name);
truncate_tsql_identifier(schema_name);
truncate_tsql_identifier(object_name);

if (!strcmp(db_name, ""))
db_name = get_cur_db_name();
@@ -2596,6 +2615,7 @@ type_id(PG_FUNCTION_ARGS)
*object_name;
char *physical_schema_name;
char *input;
char **splitted_object_name;
Oid schema_oid = InvalidOid;
Oid user_id = GetUserId();
Oid result = InvalidOid;
@@ -2620,20 +2640,40 @@
(errcode(ERRCODE_STRING_DATA_LENGTH_MISMATCH),
errmsg("input value is too long for object name")));

/*
* Split the input string, downcase and truncate if needed
* and return the db_name, schema_name and object_name.
*/
downcase_truncate_split_object_name(input, NULL, &db_name, &schema_name, &object_name);

pfree(input);

/* resolve the two part name */
splitted_object_name = split_object_name(input);
/* If three part name(db_name also included in input) then return null */
if(pg_mbstrlen(db_name) != 0)
if(pg_mbstrlen(splitted_object_name[1]) != 0)
{
pfree(input);
for (int j = 0; j < 4; j++)
pfree(splitted_object_name[j]);
pfree(splitted_object_name);
PG_RETURN_NULL();
}
db_name = get_cur_db_name();
schema_name = splitted_object_name[2];
object_name = splitted_object_name[3];

/* downcase identifier if needed */
if (pltsql_case_insensitive_identifiers)
{
db_name = downcase_identifier(db_name, strlen(db_name), false, false);
schema_name = downcase_identifier(schema_name, strlen(schema_name), false, false);
object_name = downcase_identifier(object_name, strlen(object_name), false, false);
for (int k = 0; k < 4; k++)
pfree(splitted_object_name[k]);
}
else
pfree(splitted_object_name[0]);

pfree(input);
pfree(splitted_object_name);

/* truncate identifiers if needed */
truncate_tsql_identifier(db_name);
truncate_tsql_identifier(schema_name);
truncate_tsql_identifier(object_name);

if (!strcmp(schema_name, ""))
{