Skip to content

Commit

Permalink
chore(release): Add release workflow
Browse files Browse the repository at this point in the history
  • Loading branch information
kouloumos committed Jan 3, 2025
1 parent d89cb17 commit ee55bcd
Show file tree
Hide file tree
Showing 3 changed files with 152 additions and 2 deletions.
52 changes: 52 additions & 0 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
# Manually-triggered release workflow: bumps the version with commitizen,
# builds the package with Poetry, and publishes a GitHub Release with the
# built artifacts attached.
name: Release

on:
  workflow_dispatch:

jobs:
  release:
    runs-on: ubuntu-latest
    permissions:
      contents: write  # needed to push the bump commit/tag and create the release
    steps:
      - uses: actions/checkout@v4
        with:
          # Full history so commitizen can inspect commits to compute the bump
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"

      - name: Install Poetry
        uses: snok/install-poetry@v1

      - name: Install dependencies
        run: poetry install

      - name: Configure Git
        # Commit the version bump / changelog as the github-actions bot
        run: |
          git config --local user.email "github-actions[bot]@users.noreply.github.com"
          git config --local user.name "github-actions[bot]"

      - name: Bump version and update changelog
        id: bump
        run: |
          output=$(poetry run cz bump --yes)
          # cz prints a line like "tag to create: v1.2.3"; field 4 is the tag
          tag=$(echo "$output" | grep "tag to create:" | awk '{print $4}')
          # ::set-output is deprecated and disabled by GitHub Actions;
          # step outputs must be appended to the $GITHUB_OUTPUT file instead.
          echo "version=${tag}" >> "$GITHUB_OUTPUT"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Build package
        run: poetry build

      - name: Create GitHub Release
        uses: softprops/action-gh-release@v2
        with:
          body_path: CHANGELOG.md
          tag_name: ${{ steps.bump.outputs.version }}
          files: |
            dist/*.whl
            dist/*.tar.gz
89 changes: 88 additions & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

13 changes: 12 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
[tool.poetry]
name = "scraper"
version = "0.1.0"
description = "Scraper is designed to automate the process of gathering information from a variety of key Bitcoin-related sources. It leverages GitHub Actions to schedule nightly cron jobs, ensuring that the most up-to-date content is captured from each source according to a defined frequency. The scraped data are then stored in an Elasticsearch index."
description = "A flexible multi-source scraper application designed to gather information from GitHub repositories and web pages. Leverages both Git-based and Scrapy-based approaches to handle different source types effectively."
authors = ["kouloumos <[email protected]>", "urvish patel <[email protected]>"]
readme = "README.md"
packages = [{include = "scraper"}]
Expand All @@ -18,6 +18,7 @@ python-dotenv = "^1.0.1"
aiohttp = "^3.10.5"
scrapy = "^2.11.2"
openai = "^1.52.2"
beautifulsoup4 = "^4.12.3"


[tool.poetry.group.dev.dependencies]
Expand All @@ -26,11 +27,21 @@ pandas = "^2.2.3"
matplotlib = "^3.9.2"
tabulate = "^0.9.0"
seaborn = "^0.13.2"
commitizen = "^4.1.0"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.commitizen]
name = "cz_conventional_commits"
version = "0.1.0"
tag_format = "v$version"
version_files = [
"pyproject.toml:version",
]
update_changelog_on_bump = true

[tool.poetry.scripts]
scraper = "scraper.cli:cli"
playground = "scraper.run_jupyter:main"
Expand Down

0 comments on commit ee55bcd

Please sign in to comment.