diff --git a/.gitignore b/.gitignore
index c09ac6d..b616da8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,4 +12,5 @@ sroka.egg-info/
 **.csv
 config.ini
 sroka_test_file.py
+environments.txt
 build
diff --git a/CODEOWNERS b/CODEOWNERS
index 2fa6d4e..9994da9 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -2,4 +2,6 @@
 # the repo. Unless a later match takes precedence,
 # @martynaut and @dorotamierzwa will be requested for
 # review when someone opens a pull request.
-* @martynaut @dorotamierzwa
+* @martynaut
+* @Wikia/adeng
+* @Wikia/data-engineering
diff --git a/README.md b/README.md
index f832762..ed8da69 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@ Package providing simple Python access to data in:
 * MySQL
 * neo4j
 
-Sroka library was checked to work for Python **3.7, 3.8 and 3.9**.
+Sroka library was checked to work for Python **>=3.8, <=3.11**.
 
 ## Developers
 
diff --git a/requirements.txt b/requirements.txt
index 54ce97d..8a77f98 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,7 +5,7 @@ google-auth-httplib2>=0.0.3
 google_api_python_client>=1.6.7
 google_auth_oauthlib>=0.2.0
 google-cloud-bigquery>=1.24.0
-googleads>=29.0.0
+googleads>=42.0.0
 isort==4.3.9
 lxml>=4.6.5
 mysql-connector-python==8.0.17
@@ -15,6 +15,6 @@ pyarrow>=0.11.1
 qds_sdk>=1.10.1
 requests>=2.20
 retrying>=1.3.3
-urllib3>=1.26.5
+urllib3>=1.26.18
 py2neo>=4.2.0
 db-dtypes
diff --git a/setup.py b/setup.py
index b9a337f..d8d2047 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@
 
 setuptools.setup(
     name="sroka",
-    version="0.0.7",
+    version="0.0.8",
     author="Ad Engineering FANDOM",
     author_email="murbanek@fandom.com",
     description="Package for access GA, GAM, MOAT, Qubole, Athena, S3, Rubicon APIs, BigQuery, MySQL",
diff --git a/sroka/api/s3_connection/s3_connection_api.py b/sroka/api/s3_connection/s3_connection_api.py
index 3370925..9e9ec81 100644
--- a/sroka/api/s3_connection/s3_connection_api.py
+++ b/sroka/api/s3_connection/s3_connection_api.py
@@ -31,7 +31,7 @@ def _download_data(key_prefix, s3, bucket_name, prefix, sep, skip_empty_files=Tr
             print('File not found on s3')
             return pd.DataFrame([])
         try:
-            df_list.append(pd.read_csv(data, error_bad_lines=False, warn_bad_lines=False, sep=sep,
+            df_list.append(pd.read_csv(data, on_bad_lines='skip', sep=sep,
                                        header=header_setting))
         except UnicodeDecodeError:
             df_list.append(pq.read_pandas(data).to_pandas())
@@ -47,7 +47,7 @@ def _download_data(key_prefix, s3, bucket_name, prefix, sep, skip_empty_files=Tr
         if 'SUCCESS' not in file.key:
             tmp = StringIO(str(file.get()['Body'].read(), 'utf-8'))
             try:
-                data = pd.read_csv(tmp, error_bad_lines=False, warn_bad_lines=False, sep=sep,
+                data = pd.read_csv(tmp, on_bad_lines='skip', sep=sep,
                                    header=header_setting)
                 df_list.append(data)
             except EmptyDataError: