-
Notifications
You must be signed in to change notification settings - Fork 0
/
run.py
executable file
·180 lines (145 loc) · 4.94 KB
/
run.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
import logging
import os
import subprocess
import sys
import click
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings
from sqlalchemy import MetaData
import config
from db import sql_engine
from db.table_manager import drop_static_tables
from etf_history_api import save_history_api
#from etf_history_excel import save_history_excel
from extraetf import Extraetf
from frontend.app import run_gui
from isin_extractor import extract_isins_from_db
class AsciiArtGroup(click.Group):
    """Click group that prints an ASCII-art banner before the normal help text."""

    def format_help(self, ctx, formatter):
        # Raw string literal: the banner is full of backslash runs ("\_",
        # "\|", "\ ") that are invalid escape sequences in a plain string
        # and emit SyntaxWarning on Python 3.12+ (a future SyntaxError).
        # The rendered output is byte-identical to the old literal.
        click.echo(r"""
 _____ _ __ _____ _ _ _
| ___| | / _| | _ | | | (_) (_)
| |__ | |_| |_ | | | |_ __ | |_ _ _ __ ___ _ _______ _ __
| __|| __| _| | | | | '_ \| __| | '_ ` _ \| |_ / _ \ '__|
| |___| |_| | \ \_/ / |_) | |_| | | | | | | |/ / __/ |
\____/ \__|_| \___/| .__/ \__|_|_| |_| |_|_/___\___|_|
| |
|_|
""")
        super().format_help(ctx, formatter)
def set_log_level(level):
    """Configure the root logger to emit records at *level* to stdout.

    Args:
        level: a ``logging`` level constant such as ``logging.WARNING``.
    """
    root = logging.getLogger()
    root.setLevel(level)
    # Reuse an existing stdout handler instead of stacking a new one on
    # every call — the old code added a fresh StreamHandler each time,
    # which duplicated every log line on repeated invocations.
    for handler in root.handlers:
        if isinstance(handler, logging.StreamHandler) and handler.stream is sys.stdout:
            handler.setLevel(level)
            return
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(level)
    root.addHandler(handler)
def run_crawler(name: str):
    """Start the Scrapy spider registered under *name* and block until it finishes."""
    crawler_process = CrawlerProcess(get_project_settings())
    crawler_process.crawl(name)
    crawler_process.start()
# Root CLI group; all subcommands below register themselves via
# @etfopt.command(). AsciiArtGroup prints a banner before the help text.
# No docstring on purpose: click would surface it as --help output.
@click.group(cls=AsciiArtGroup)
def etfopt():
    pass
@etfopt.command()
def drop_static_data():
    """
    Deletes tables holding static ETF data
    """
    # drop_static_tables issues the drops against the shared SQLAlchemy engine.
    drop_static_tables(sql_engine)
    click.echo('Successfully dropped tables')
@etfopt.command()
def crawl_extraetf():
    """
    Runs a crawler for retrieving data from extraetf.com
    """
    click.echo("Starting to crawl extraetf.com. Wait until you see the finish message. This might take a while ...")
    # Unlike crawl_justetf, this does not go through run_crawler/Scrapy;
    # collect_data() appears to run the whole crawl synchronously.
    extraetf = Extraetf()
    extraetf.collect_data()
    click.echo('Finished crawling extraetf.com')
@etfopt.command()
def crawl_justetf():
    """
    Runs a crawler for retrieving data from justetf.com
    """
    try:
        click.echo("Starting to crawl justetf.com. Wait until you see the finish message. This might take a while ...")
        run_crawler('justetf')
        click.echo('Finished crawling the justetf.com website')
    except Exception:
        # Was a bare "except:", which also intercepted KeyboardInterrupt /
        # SystemExit and printed a misleading failure message for them.
        # Report the failure, then re-raise so the exit code reflects it.
        click.echo('Failed crawling the justetf.com website')
        raise
@etfopt.command()
@click.option('--outfile', '-o', default='extracted_isins.xlsx', help='output file for extracted isins')
def extract_isins(outfile):
    """
    Extracts all ISINS from db to a csv file
    """
    # NOTE(review): the docstring says "csv file" but the default outfile is
    # .xlsx — confirm which format extract_isins_from_db actually writes.
    extract_isins_from_db(outfile)
    click.echo(f"Wrote ISINs into {outfile}")
#@etfopt.command()
#@click.option('--historyfile', '-h', default='etf_history.csv',
#help='csv file containing etf history (output from Refinitiv)')
#@click.option('--isinfile', '-i', default='isin.csv', help='helper csv file containing isins')
#def import_history_excel(historyfile, isinfile):
#"""
#Retrieves historic etf data from Refinitiv (Excel)
#"""
#click.echo("Getting etf history...")
#save_history_excel(historyfile, isinfile)
#click.echo('Finished retrieving etf history')
@etfopt.command()
def import_history():
    """
    Retrieves historic etf data from Refinitiv (API)
    """
    # API-based replacement for the commented-out Excel importer above;
    # save_history_api handles retrieval and persistence itself.
    click.echo("Getting etf history...")
    save_history_api()
    click.echo('Finished retrieving etf history')
@etfopt.command()
@click.option('--file', '-f', default='backup.sql', help='path to database import file')
def import_db(file):
    """
    Imports the etf database from a file
    """
    # Drops every reflected table, then replays the SQL dump via the psql
    # client (must be on PATH).
    filepath = os.path.realpath(file)
    # Check the same resolved path we report — the old code tested `file`
    # but printed `filepath`.
    if not os.path.isfile(filepath):
        click.echo(f"Importing failed: could not find the file {filepath}")
        return
    if not click.confirm("Warning: This will delete all data prior to importing. Do you want to continue?"):
        click.echo("Import aborted")
        return
    # drop all tables so the dump restores into a clean schema
    # NOTE(review): MetaData.bind is deprecated/removed in SQLAlchemy 2.0 —
    # confirm the pinned version, or pass the engine to reflect()/drop_all().
    meta = MetaData()
    meta.bind = sql_engine
    meta.reflect()
    meta.drop_all()
    try:
        subprocess.check_output(
            ['psql', '-d', config.get_sql_uri(nodriver=True), '-f', filepath])
        click.echo("Etf database was imported successfully")
    except subprocess.CalledProcessError as err:
        click.echo("Importing the etf database failed")
        # err.output holds psql's captured stdout; the old code echoed a
        # never-reassigned placeholder '' here.
        click.echo(err.output)
@etfopt.command()
def export_db():
    """
    Exports the etf database into a file
    """
    # Dumps the database via the pg_dump client (must be on PATH) into
    # ./backup.sql.
    filepath = 'backup.sql'
    try:
        subprocess.check_output(['pg_dump', '-d', config.get_sql_uri(nodriver=True), '-f', filepath])
        click.echo(f"Etf database was exported successfully into the file '{os.getcwd()}/{filepath}'")
    except subprocess.CalledProcessError as err:
        click.echo("Exporting the etf database failed")
        # err.output holds pg_dump's captured stdout; the old code echoed a
        # never-reassigned placeholder '' here.
        click.echo(err.output)
@etfopt.command()
def start_gui():
    """
    Starts the graphical user interface
    """
    # Blocks for the lifetime of the GUI (frontend.app.run_gui).
    run_gui()
if __name__ == '__main__':
    # Default to WARNING so library chatter stays quiet unless a command
    # opts into more verbose logging.
    set_log_level(logging.WARNING)
    etfopt()