
Commit

Updated list of repos to use submodules from lcs repo. #4
manastalukdar committed Jan 10, 2021
1 parent ba719b8 commit c63d54f
Showing 2 changed files with 49 additions and 17 deletions.
22 changes: 16 additions & 6 deletions repositories.md
@@ -1,3 +1,7 @@




# Repositories

## Website
@@ -12,9 +16,15 @@

### Book

- [Building Microservices - Designing Fine-Grained Systems](https://github.com/computer-science-engineering/notes-book_bmdfgs)
- [Designing Data Intensive Applications](https://github.com/computer-science-engineering/notes-book_ddia)
- [Distributed systems for fun and profit](https://github.com/computer-science-engineering/notes-book_dsffp)
- [I Heart Logs: Event Data, Stream Processing, and Data Integration](https://github.com/computer-science-engineering/notes-book_i-heart-logs_jay-kreps)
- [Production-Ready Microservices](https://github.com/computer-science-engineering/notes-book_prm)
- [Scaling Teams: Strategies for Building Successful Teams and Organizations](https://github.com/computer-science-engineering/notes-book_scaling-teams_grosse_loftesness)
- [Building Microservices - Designing Fine-Grained Systems](https://github.com/computer-science-engineering/building-microservices_designing-fine-grained-systems)
- [Designing Data Intensive Applications](https://github.com/computer-science-engineering/designing-data-intensive-applications)
- [Distributed systems for fun and profit](https://github.com/computer-science-engineering/distributed-systems-for-fun-and-profit)
- [I Heart Logs: Event Data, Stream Processing, and Data Integration](https://github.com/computer-science-engineering/i-heart-logs_event-data-stream-processing-data-integration)
- [Production-Ready Microservices](https://github.com/computer-science-engineering/production-ready-microservices)
- [Scaling Teams: Strategies for Building Successful Teams and Organizations](https://github.com/computer-science-engineering/scaling-teams_strategies-for-building-successful-teams-and-organizations)
- [Building Microservices - Designing Fine-Grained Systems](https://github.com/computer-science-engineering/building-microservices_designing-fine-grained-systems)
- [Designing Data Intensive Applications](https://github.com/computer-science-engineering/designing-data-intensive-applications)
- [Distributed systems for fun and profit](https://github.com/computer-science-engineering/distributed-systems-for-fun-and-profit)
- [I Heart Logs: Event Data, Stream Processing, and Data Integration](https://github.com/computer-science-engineering/i-heart-logs_event-data-stream-processing-data-integration)
- [Production-Ready Microservices](https://github.com/computer-science-engineering/production-ready-microservices)
- [Scaling Teams: Strategies for Building Successful Teams and Organizations](https://github.com/computer-science-engineering/scaling-teams_strategies-for-building-successful-teams-and-organizations)
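These entries are now sourced from git submodules of the learning-computer-science (lcs) repository. As a hypothetical illustration of how such a submodule gets registered so the generator below can discover it (the local paths and the submodule name here are assumptions, not part of this commit), using GitPython:

import git

# Open the local checkout of the lcs repo; this path is an assumption.
lcs = git.Repo('./repositories/learning-computer-science')

# Register one of the notes repositories as a submodule so that
# repo.submodules (used by the updated script below) will report it.
lcs.create_submodule(
    name='designing-data-intensive-applications',
    path='designing-data-intensive-applications',  # assumed location inside lcs
    url='https://github.com/computer-science-engineering/designing-data-intensive-applications',
)
lcs.index.commit('Register designing-data-intensive-applications as a submodule')

The equivalent command-line step would be git submodule add <url> <path> inside the lcs checkout.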
44 changes: 33 additions & 11 deletions src/update_repositories_md.py
@@ -1,9 +1,13 @@
 import os
 import json
+from itertools import chain
+import git
 from mdutils.mdutils import MdUtils
 import numpy as np
 from itertools import groupby
-from operator import itemgetter
+from operator import itemgetter, sub
+
+LCS_REPO_NAME = 'learning-computer-science'
 
 
 def walk_max_depth(top, maxdepth):
@@ -22,27 +26,43 @@ def find_files():
     result = {}
     root_dir = "./repositories"
     cwd = os.getcwd()
-    path_separator = os.sep
     #print(os.listdir(root_dir))
     for root, dirs, files in walk_max_depth(root_dir, 2):
         dirs.sort()
         for file in files:
             if file.endswith("metadata.json"):
-                metadatafile = os.path.normpath(os.path.join(
-                    cwd, file)).replace(path_separator, "/")
-                contents = open(metadatafile)
-                metadata = json.load(contents)
-                result[root] = (metadatafile, metadata)
+                metadatafile = os.path.normpath(os.path.join(cwd, file))
+                metadata_file = open(metadatafile)
+                metadata = json.load(metadata_file)
+                result[os.path.normpath(root)] = (metadatafile, metadata)
     return result
 
 
+def get_submodules(files):
+    submodules_result = {}
+    cwd = os.getcwd()
+    for key, value in files.items():
+        repo = git.Repo(key)
+        for submodule in repo.submodules:
+            path_to_submodule_part = os.path.normpath(
+                os.path.join(key, submodule.path))
+            path_to_metadata_file = os.path.normpath(
+                os.path.join(cwd, path_to_submodule_part, 'metadata.json'))
+            metadata_file = open(path_to_metadata_file)
+            metadata = json.load(metadata_file)
+            submodules_result[path_to_submodule_part] = (path_to_metadata_file,
+                                                         metadata)
+    return dict(
+        chain.from_iterable(d.items() for d in (files, submodules_result)))
+
+
 def get_data(files):
     data = []
     for key, value in files.items():
         data_dict = {}
         data_dict['type'] = value[1]['type']
         data_dict['name'] = value[1]['name']
-        local_path_parts = value[0].split('/')
+        local_path_parts = value[0].split(os.path.sep)
         repo_name = local_path_parts[-2]
         data_dict[
             'url'] = f'https://github.com/computer-science-engineering/{repo_name}'
@@ -58,10 +78,11 @@ def create_file(files):
     md_file.new_header(level=1, title='Repositories')
     grouped_by_type = groupby(data, key=itemgetter('type'))
     for key, value in grouped_by_type:
+        value_sorted = sorted(value, key=lambda x: x['name'])
         md_file.new_header(level=2, title=key)
         if key == 'Reading':
-            write_reading_entries(value, md_file)
-        for item in value:
+            write_reading_entries(value_sorted, md_file)
+        for item in value_sorted:
             write_item(item, md_file)
         md_file.new_line()
     md_file.create_md_file()
@@ -91,7 +112,8 @@ def get_reading_sub_header(file):
 def main():
     """main method."""
     files = find_files()
-    create_file(files)
+    files_including_submodules = get_submodules(files)
+    create_file(files_including_submodules)
 
 
 if __name__ == '__main__':
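In short, the updated script still walks ./repositories for metadata.json files, then uses GitPython to enumerate the submodules of each discovered repository, reads their metadata.json files as well, and merges both mappings before regenerating repositories.md. A minimal, self-contained sketch of that discovery-and-merge step (not the committed code; the function name collect_submodule_metadata and the example path are assumptions):

import json
import os
from itertools import chain

import git


def collect_submodule_metadata(repo_path, existing=None):
    """Return {path: (metadata_file, metadata)} for every submodule of repo_path,
    merged on top of an existing mapping (mirrors get_submodules above)."""
    existing = existing or {}
    found = {}
    repo = git.Repo(repo_path)
    for submodule in repo.submodules:
        submodule_dir = os.path.normpath(os.path.join(repo_path, submodule.path))
        metadata_path = os.path.join(submodule_dir, 'metadata.json')
        if not os.path.isfile(metadata_path):
            continue  # tolerate submodules without a metadata.json
        with open(metadata_path) as fh:
            found[submodule_dir] = (metadata_path, json.load(fh))
    # chain.from_iterable flattens both dicts' items; keys from `found` win on clash.
    return dict(chain.from_iterable(d.items() for d in (existing, found)))


if __name__ == '__main__':
    merged = collect_submodule_metadata('./repositories/learning-computer-science')
    print(sorted(merged))

The dict(chain.from_iterable(...)) idiom is equivalent to {**existing, **found}; the committed code simply keeps the chain form.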

0 comments on commit c63d54f
