import pandas as pd
import streamlit as st
import pybase64 as base64
import io
from logs_portal import log
import os
from datetime import date
from modules import tables
import boto3
from Data.credentials import credentials_s3 as creds3
from streamlit_echarts import st_echarts
from st_aggrid import GridOptionsBuilder, AgGrid, GridUpdateMode, DataReturnMode, JsCode
import numpy as np
from streamlit_lottie import st_lottie
import json
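
# Streamlit scoring page: loads the scoring workbook from S3, shows it in an
# AgGrid table with Excel download buttons, and compares portfolio / benchmark /
# selected-ticker scores on an ECharts radar chart.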

def generador_variable_pond(name, col_s1):
    """Render a centered variable label inside the given Streamlit column."""
    col_s1.markdown(
        """<p style="margin-top:35px;
                     font-size:20px;
                     text-align:center;
                     margin-bottom:30px;
                     ">{Var}</p>""".format(Var=name),
        unsafe_allow_html=True)

def button_style():
    """Inject CSS that restyles the page's buttons (hover arrow and colors)."""
    style_button = """
    <style>
    button {
        display: inline-block;
        background-color: white;
        border-radius: 15px;
        border: 4px #cccccc;
        color: #4a4a4a;
        text-align: center;
        font-size: 18px;
        padding: 2px;
        width: 200px;
        transition: all 0.5s;
        cursor: pointer;
        margin-top: 25px;
    }
    button span {
        cursor: pointer;
        display: inline-block;
        position: relative;
        transition: 0.5s;
    }
    button span:after {
        content: '\\00bb';  /* "»" arrow; backslash doubled so Python keeps the CSS escape */
        position: absolute;
        opacity: 0;
        top: 0;
        right: -20px;
        transition: 0.5s;
    }
    button:hover {
        background-color: #bb1114;
        color: #e8e8e8;
    }
    button:hover span {
        padding-right: 25px;
    }
    button:hover span:after {
        opacity: 1;
        right: 0;
    }
    </style>
    """
    st.markdown(style_button, unsafe_allow_html=True)

def get_table_excel_link(df, name):
    """Return an HTML download button whose href embeds `df` as a base64 .xlsx file."""
    towrite = io.BytesIO()
    writer = pd.ExcelWriter(towrite, engine='xlsxwriter')
    df.to_excel(writer, index=False, header=True)
    worksheet = writer.sheets["Sheet1"]
    worksheet.set_column('A:BZ', 18)  # widen every column so values stay readable
    writer.close()
    towrite.seek(0)  # reset the pointer before reading the buffer back
    file_name = 'Scoring.xlsx'
    style = 'style="color:black;text-decoration: none; font-size:18px;" '
    b64 = base64.b64encode(towrite.read()).decode()
    linko = (
        f'<center><a href="data:application/vnd.openxmlformats-officedocument.'
        f'spreadsheetml.sheet;base64,{b64}" {style}download="{file_name}">'
        f'<button>{name}</button></a></center>'
    )
    return linko
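
# The link built above embeds the workbook as a base64 data URI, so the Excel
# file is generated in memory and downloaded client-side without touching disk.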

def read_excel_s3(key, secret_key, bucket, path):
    """Download an Excel file from S3 and return it as a DataFrame."""
    s3_client = boto3.client('s3', aws_access_key_id=key, aws_secret_access_key=secret_key)
    response = s3_client.get_object(Bucket=bucket, Key=path)
    data = response["Body"].read()
    return pd.read_excel(io.BytesIO(data), engine='openpyxl')

def display_table(df: pd.DataFrame, name):
    """Render `df` in AgGrid with multi-row checkbox selection and pinned key columns."""
    gb = GridOptionsBuilder.from_dataframe(df)
    gb.configure_selection(selection_mode="multiple", use_checkbox=True)
    gb.configure_column(name, headerCheckboxSelection=True)
    gb.configure_columns(("TICKER", "COUNTRY", "LV1"), pinned=True)
    return AgGrid(
        df,
        gridOptions=gb.build(),
        update_mode=GridUpdateMode.SELECTION_CHANGED,
        enable_enterprise_modules=True)

def read_scoring():
    """Load the scoring workbook ("scoring.xlsx") from the configured S3 bucket."""
    key = creds3["S3_KEY_ID"]
    secret_key = creds3["S3_SECRET_KEY"]
    bucket = creds3["S3_BUCKET"]
    path = "scoring.xlsx"
    return read_excel_s3(key, secret_key, bucket, path)
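
# `creds3` is expected to provide the S3_KEY_ID, S3_SECRET_KEY and S3_BUCKET
# entries used above; "scoring.xlsx" is read from the root of that bucket.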

# @log
def general():
    # Show a Lottie loading animation in the sidebar while the page builds.
    with open("Data/lotties/99268-laading-22.json", "r") as f:
        spinner = json.load(f)
    cols1, cols2 = st.sidebar.columns((3, 1))
    place = cols2.empty()
    with place:
        st_lottie(spinner)
    # scoring.index = scoring['Ticker']
    button_style()
    scoring_completo = read_scoring()
    col1, col2, col3 = st.columns((4, 1, 1))
    col1.write("Last Update: " + str(scoring_completo.iloc[0]["TODAY"]))
    col1.write("Valores en MM USD")
    place1 = col2.empty()
    place2 = col3.empty()
    scoring_completo = scoring_completo.drop(columns=["TODAY"])
    scoring_completo[["Nota", "W Latino", "W Small", "Nota ESG"]] = (
        scoring_completo[["Nota", "W Latino", "W Small", "Nota ESG"]].fillna(0))
    scoring = scoring_completo.copy()
    # Round the score metrics and express weight/benchmark columns as percentages.
    metrics = ['MOMENTUM Precio', 'MOMENTUM Fundamental', 'VALUE', 'PROF',
               'Distres_Prom', 'Delta 1M', 'QUALITY', 'Nota ESG']
    metrics2 = ['Market_Cap'] + metrics
    scoring[metrics2] = scoring[metrics2].round()
    weight_cols = ["W Latino", "W Small", "BM Latino", "BM Small"]
    scoring[weight_cols] = (scoring[weight_cols] * 100).round(2).fillna(0)
    # Column subset and display names for the AgGrid summary table.
    metrics_aggrid = ['Ticker', 'Portfolio_Country', 'LV1', 'Market_Cap',
                      'ADTV', 'Large/Small', 'Delta 1M', 'Distres_Prom',
                      'MOMENTUM Precio', 'MOMENTUM Fundamental', 'VALUE',
                      'PROF', 'QUALITY', 'Score', 'Nota', 'Nota ESG',
                      'Stop Loss', 'W Latino', 'W Small', 'BM Latino', 'BM Small']
    scoring = scoring[metrics_aggrid]
    scoring[metrics_aggrid] = scoring[metrics_aggrid].replace(np.nan, -1)
    metrics3 = ['TICKER', 'COUNTRY', 'LV1', 'MKT CAP', 'ADTV', 'L/S', 'Δ 1M',
                'DISTRES', 'MOM PREC', 'MOM FUND', 'VALUE', 'PROF', 'QUALITY',
                'SCORE', 'NOTA', 'ESG', 'STOP LOSS', 'W LAT', 'W SMALL',
                'BM LAT', 'BM SMALL']
    metrics4 = ['MOM PREC', 'MOM FUND', 'VALUE', 'QUALITY', 'DISTRES', 'PROF']
    scoring.columns = metrics3
    button = st.button("Refresh")
    r = display_table(scoring, 'TICKER')
    if button:
        # Clear cached data and rerun so a fresh workbook is pulled from S3.
        st.experimental_memo.clear()
        st.experimental_rerun()
    with place1:
        link = get_table_excel_link(scoring_completo, "Scoring completo")
        st.markdown(link, unsafe_allow_html=True)
    with place2:
        link2 = get_table_excel_link(scoring, "Scoring resumen")
        st.markdown(link2, unsafe_allow_html=True)
    # Checkboxes that toggle which portfolios / benchmarks appear on the radar chart.
    w_lat = []
    w_small = []
    bm_lat = []
    bm_small = []
    col1, col2, col3, col4, col5 = st.columns((1, 1, 1, 1, 3))
    large = col1.checkbox("LUXMEXEQ", True)
    small = col2.checkbox("LUXLATSML", True)
    m1la = col3.checkbox("M1LA")
    msm = col4.checkbox("MSLUELAN")
    # Mirror the checkbox values into the session_state flags used below.
    if "large" not in st.session_state:
        st.session_state.large = False
    if "small" not in st.session_state:
        st.session_state.small = False
    if "bm_sm" not in st.session_state:
        st.session_state.bm_sm = False
    if "bm_lat" not in st.session_state:
        st.session_state.bm_lat = False
    st.session_state.large = large
    st.session_state.small = small
    st.session_state.bm_lat = m1la
    st.session_state.bm_sm = msm
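    # Each portfolio/benchmark radar value below is the weighted sum of a metric
    # over all tickers (metric * weight / 100, with the weights in percent).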
    col1, col2, col3 = st.columns((2.5, 1, 1))
    try:
        series_data = []
        names = []
        for metric in metrics4:
            w_lat.append((scoring[metric] * scoring["W LAT"] / 100).sum())
            w_small.append((scoring[metric] * scoring["W SMALL"] / 100).sum())
            bm_lat.append((scoring[metric] * scoring["BM LAT"] / 100).sum())
            bm_small.append((scoring[metric] * scoring["BM SMALL"] / 100).sum())
        if st.session_state.large:
            series_data.append({"value": w_lat, "name": "LUXMEXEQ"})
            names.append("LUXMEXEQ")
        if st.session_state.small:
            series_data.append({"value": w_small, "name": "LUXLATSML"})
            names.append("LUXLATSML")
        if st.session_state.bm_lat:
            series_data.append({"value": bm_lat, "name": "M1LA"})
            names.append("M1LA")
        if st.session_state.bm_sm:
            series_data.append({"value": bm_small, "name": "MSLUELAN"})
            names.append("MSLUELAN")
        if len(r['selected_rows']) > 0:
            # Add every ticker selected in the grid as its own radar series.
            for emp in r['selected_rows']:
                selected = emp.copy()
                name = selected['TICKER']
                names.append(name)
                indicators = []
                series_value = []
                for met in metrics4:
                    indicators.append({'name': met, "max": 100})
                    series_value.append(selected[met])
                series_data.append({"value": series_value, "name": name})
                with col2:
                    st.metric('SCORE PROMEDIO - ' + name,
                              int(np.array(series_value).mean()))
        else:
            indicators = [{'name': met, "max": 100} for met in metrics4]
        option = {
            "title": {"text": 'Score'},
            "legend": {"data": names},
            "radar": {"indicator": indicators},
            "series": [
                {
                    "name": "",
                    "type": "radar",
                    "data": series_data,
                }
            ],
        }
        with col1:
            st_echarts(option, height="400px", width="80%")
    except Exception as exc:
        st.write(exc)
    # st.image("img/Scoring.png", width="100%")
    place.empty()

def diagrama():
    """Render a pandas-profiling EDA report of the main scoring metrics."""
    import pandas_profiling  # imported for the DataFrame.profile_report() accessor
    from streamlit_pandas_profiling import st_profile_report
    st.image("img/Scoring.png")
    df = read_scoring()
    df = df[['MOMENTUM Precio', 'MOMENTUM Fundamental', 'VALUE', 'QUALITY',
             'PROF', 'Distres_Prom', 'Score']]
    pr = df.profile_report()
    st_profile_report(pr)
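
# Minimal usage sketch (assumption: this module is one page of a multipage
# Streamlit portal whose entry point is not part of this file; the import path
# `portal_pages.scoring` below is hypothetical):
#
#   import streamlit as st
#   from portal_pages import scoring
#
#   page = st.sidebar.radio("Menu", ["Scoring", "Diagrama"])
#   if page == "Scoring":
#       scoring.general()
#   else:
#       scoring.diagrama()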