
Merge branch 'dev' into startup-wizard
Ulincsys committed May 29, 2024
2 parents 53290f2 + ca489d5 · commit 22ba0d6
Showing 166 changed files with 2,601 additions and 1,730 deletions.
100 changes: 0 additions & 100 deletions .docker-setup.sh

This file was deleted.

27 changes: 27 additions & 0 deletions .github/workflows/auto_merge.yml
@@ -0,0 +1,27 @@
name: Merge main into dev

on:
  push:
    branches:
      - main

jobs:
  update-dev:
    permissions: write-all
    name: update-dev
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - run: |
          git config user.name 'GitHub Actions'
          git config user.email '[email protected]'
          git checkout dev
          git merge main
          echo "Done with merge"
      - name: Push to dev
        uses: CasperWA/push-protected@v2
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          branch: dev
2 changes: 1 addition & 1 deletion .pylintrc
@@ -12,7 +12,7 @@
#refactoring checker
#enable=R

-disable=E0611,E1101,W1203,R0801,W0614,W0611,C0411,C0103,C0301,C0303,C0304,C0305,W0311,E0401
+disable=E0611,E1101,W1203,R0801,W0614,W0611,C0411,C0103,C0301,C0303,C0304,C0305,W0311,E0401,C0116


# Analyse import fallback blocks. This can be used to support both Python 2 and
5 changes: 3 additions & 2 deletions README.md
@@ -1,4 +1,4 @@
-# Augur NEW Release v0.62.4
+# Augur NEW Release v0.70.0

Augur is primarily a data engineering tool that makes it possible for data scientists to gather open source software community data. Less data carpentry for everyone else!
The primary way of looking at Augur data is through [8Knot](https://github.com/oss-aspen/8knot) ... A public instance of 8Knot is available at https://metrix.chaoss.io ... That is tied to a public instance of Augur at https://ai.chaoss.io
@@ -10,7 +10,8 @@ The primary way of looking at Augur data is through [8Knot](https://github.com/o
## NEW RELEASE ALERT!
### [If you want to jump right in, updated docker build/compose and bare metal installation instructions are available here](docs/new-install.md)

-Augur is now releasing a dramatically improved new version to the main branch. It is also available here: https://github.com/chaoss/augur/releases/tag/v0.62.4
+Augur is now releasing a dramatically improved new version to the main branch. It is also available here: https://github.com/chaoss/augur/releases/tag/v0.70.0

- The `main` branch is a stable version of our new architecture, which features:
- Dramatic improvement in the speed of large scale data collection (100,000+ repos). All data is obtained for 100k+ repos within 2 weeks.
- A new job management architecture that uses Celery and Redis to manage queues, and enables users to run a Flower job monitoring dashboard
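The README bullet above credits the new collection speed to a Celery/Redis job management architecture. For readers unfamiliar with that pattern, here is a minimal, self-contained sketch of a Celery task backed by a Redis broker; the task name, broker URL, and module layout are illustrative assumptions, not Augur's actual collection code.

```python
# Minimal Celery + Redis sketch (illustrative; not Augur's actual task definitions).
# Assumes a Redis instance is running locally on the default port.
from celery import Celery

app = Celery(
    "collection_sketch",
    broker="redis://localhost:6379/0",   # Redis holds the task queue
    backend="redis://localhost:6379/0",  # and stores task results
)

@app.task
def collect_repo(repo_url: str) -> str:
    # Placeholder for a collection job; a real worker would fetch and store data here.
    return f"collected {repo_url}"

if __name__ == "__main__":
    # Enqueue a job; a worker started with `celery -A <module> worker` picks it up.
    collect_repo.delay("https://github.com/chaoss/augur")
```

The Flower dashboard mentioned in the README attaches to the same broker, which is how a user can watch queued and running collection jobs.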
63 changes: 32 additions & 31 deletions augur/api/gunicorn_conf.py
@@ -1,47 +1,48 @@
# from augur import ROOT_AUGUR_DIRECTORY
import multiprocessing
import logging
import os
from pathlib import Path
from glob import glob
import shutil

-from augur.application.db.session import DatabaseSession
-from augur.application.config import AugurConfig
+from augur.application.db.lib import get_value, get_section
+from augur.application.db import dispose_database_engine

logger = logging.getLogger(__name__)
-with DatabaseSession(logger) as session:
-
-    augur_config = AugurConfig(logger, session)
-
-
-    # ROOT_AUGUR_DIRECTORY = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
-
-    # base_log_dir = ROOT_AUGUR_DIRECTORY + "/logs/"
-
-    # Path(base_log_dir).mkdir(exist_ok=True)
-
-    workers = multiprocessing.cpu_count() * 2 + 1
-    umask = 0o007
-    reload = True
-    reload_extra_files = glob(str(Path.cwd() / '**/*.j2'), recursive=True)
-
-    # set the log location for gunicorn
-    logs_directory = augur_config.get_value('Logging', 'logs_directory')
-    accesslog = f"{logs_directory}/gunicorn.log"
-    errorlog = f"{logs_directory}/gunicorn.log"
-
-    ssl_bool = augur_config.get_value('Server', 'ssl')
-
-    if ssl_bool is True:
-
-        workers = int(augur_config.get_value('Server', 'workers'))
-        bind = '%s:%s' % (augur_config.get_value("Server", "host"), augur_config.get_value("Server", "port"))
-        timeout = int(augur_config.get_value('Server', 'timeout'))
-        certfile = str(augur_config.get_value('Server', 'ssl_cert_file'))
-        keyfile = str(augur_config.get_value('Server', 'ssl_key_file'))
-
-    else:
-        workers = int(augur_config.get_value('Server', 'workers'))
-        bind = '%s:%s' % (augur_config.get_value("Server", "host"), augur_config.get_value("Server", "port"))
-        timeout = int(augur_config.get_value('Server', 'timeout'))
+
+# ROOT_AUGUR_DIRECTORY = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+# base_log_dir = ROOT_AUGUR_DIRECTORY + "/logs/"
+
+# Path(base_log_dir).mkdir(exist_ok=True)
+
+workers = multiprocessing.cpu_count() * 2 + 1
+umask = 0o007
+reload = True
+reload_extra_files = glob(str(Path.cwd() / '**/*.j2'), recursive=True)
+
+# set the log location for gunicorn
+logs_directory = get_value('Logging', 'logs_directory')
+accesslog = f"{logs_directory}/gunicorn.log"
+errorlog = f"{logs_directory}/gunicorn.log"
+
+ssl_bool = get_value('Server', 'ssl')
+
+if ssl_bool is True:
+
+    workers = int(get_value('Server', 'workers'))
+    bind = '%s:%s' % (get_value("Server", "host"), get_value("Server", "port"))
+    timeout = int(get_value('Server', 'timeout'))
+    certfile = str(get_value('Server', 'ssl_cert_file'))
+    keyfile = str(get_value('Server', 'ssl_key_file'))
+
+else:
+    workers = int(get_value('Server', 'workers'))
+    bind = '%s:%s' % (get_value("Server", "host"), get_value("Server", "port"))
+    timeout = int(get_value('Server', 'timeout'))
+
+def worker_exit(server, worker):
+    print("Stopping gunicorn worker process")
+    dispose_database_engine()
1 change: 0 additions & 1 deletion augur/api/metrics/README.md
@@ -14,7 +14,6 @@ import datetime
import sqlalchemy as s
import pandas as pd
from augur.api.util import register_metric
-from augur.application.db.engine import engine
```
3. Defining the function
1. Add the decorator @register_metric to the function
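The README steps above outline how a metric is declared in `augur/api/metrics`. Here is a minimal sketch of such a function, combining the `@register_metric` decorator from the README with the `current_app.engine` access pattern that the commit.py diff below switches to; the metric name, table, and SQL are invented for illustration and it only runs inside the Augur Flask app.

```python
# Hypothetical metric following the documented pattern: decorate with @register_metric
# and query the database through the Flask app's engine (per this commit's changes).
import pandas as pd
import sqlalchemy as s
from flask import current_app

from augur.api.util import register_metric

@register_metric()
def example_commit_count(repo_group_id, repo_id=None):
    # Illustrative SQL only; real metrics query Augur's actual schema.
    example_sql = s.sql.text("""
        SELECT COUNT(*) AS commit_count
        FROM commits
        WHERE repo_id = :repo_id
    """)

    # The engine is resolved from the running app rather than imported at module level.
    with current_app.engine.connect() as conn:
        results = pd.read_sql(example_sql, conn, params={"repo_id": repo_id})
    return results
```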
18 changes: 9 additions & 9 deletions augur/api/metrics/commit.py
@@ -6,9 +6,9 @@
import datetime
import sqlalchemy as s
import pandas as pd
-from augur.api.util import register_metric
+from flask import current_app

-from ..server import engine
+from augur.api.util import register_metric

@register_metric()
def committers(repo_group_id, repo_id=None, begin_date=None, end_date=None, period='month'):
@@ -90,7 +90,7 @@ def committers(repo_group_id, repo_id=None, begin_date=None, end_date=None, peri
"""
)

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(committersSQL, conn, params={'repo_id': repo_id,
'repo_group_id': repo_group_id,'begin_date': begin_date, 'end_date': end_date, 'period':period})

@@ -168,7 +168,7 @@ def annual_commit_count_ranked_by_new_repo_in_repo_group(repo_group_id, repo_id=
ORDER BY YEAR ASC
""".format(table, period))

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(cdRgNewrepRankedCommitsSQL, conn, params={'repo_id': repo_id,
'repo_group_id': repo_group_id,'begin_date': begin_date, 'end_date': end_date})
return results
@@ -267,7 +267,7 @@ def annual_commit_count_ranked_by_repo_in_repo_group(repo_group_id, repo_id=None
LIMIT 10
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(cdRgTpRankedCommitsSQL, conn, params={ "repo_group_id": repo_group_id,
"repo_id": repo_id})
return results
@@ -299,7 +299,7 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8):
ORDER BY patches DESC) a
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(total_commits_SQL, conn,
params={'year': year, 'repo_group_id': repo_group_id})
else:
@@ -312,7 +312,7 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8):
ORDER BY patches DESC) a
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(total_commits_SQL, conn,
params={'year': year, 'repo_id': repo_id})

@@ -339,7 +339,7 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8):
ORDER BY commits DESC
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(committers_SQL, conn,
params={'year': year, 'repo_group_id': repo_group_id})
else:
@@ -359,7 +359,7 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8):
ORDER BY commits DESC
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(committers_SQL, conn,
params={'year': year, 'repo_id': repo_id})

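A note on the pattern these hunks adopt: `current_app` is a Flask proxy that only resolves inside an application context, so the database engine is looked up when a metric actually runs rather than at import time (which is what the removed `from ..server import engine` did). Below is a small illustrative sketch of why that works, using a made-up app factory and an in-memory SQLite engine standing in for Augur's real setup.

```python
# Illustrative only: shows that current_app.engine resolves at call time, not import time.
import sqlalchemy as s
from flask import Flask, current_app

def create_app():
    app = Flask(__name__)
    # Hypothetical attribute, mirroring how the metrics code expects app.engine to exist.
    app.engine = s.create_engine("sqlite:///:memory:")
    return app

app = create_app()

with app.app_context():
    # Inside an app context (or a request), current_app is bound to `app`,
    # so code can reach the shared engine without importing it from a module.
    with current_app.engine.connect() as conn:
        print(conn.execute(s.text("SELECT 1")).scalar())
```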