first full commit
Files changed:
- README.md +4 -4
- app.py +192 -128
- requirements.txt +8 -0
README.md
CHANGED
@@ -1,8 +1,8 @@
 ---
-title:
-emoji:
-colorFrom:
-colorTo:
+title: Raci Create
+emoji: ⚡
+colorFrom: purple
+colorTo: red
 sdk: streamlit
 sdk_version: 1.10.0
 app_file: app.py
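(The YAML front matter above is the Hugging Face Spaces configuration block: title, emoji, colorFrom and colorTo control how the Space card is rendered, while sdk, sdk_version and app_file tell Spaces to serve app.py with Streamlit 1.10.0.)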
app.py
CHANGED
@@ -7,17 +7,17 @@ import numpy as np
 import pandas as pd
 import regex as re
 import streamlit as st
+import pickle
+import io
+import simplejson as json
+import base64
+import uuid
+
 
 
 # In[2]:
 
-cobie_file = pd.ExcelFile(path_to_cobie_file)
 
-cobie_floor_df = cobie_file.parse(sheet_name = 'Floor', dtype={'ExtIdentifier':str, 'Name':str})
-cobie_space_df = cobie_file.parse(sheet_name = 'Space', dtype={'ExtIdentifier':str, 'Name':str})
-cobie_type_df = cobie_file.parse(sheet_name = 'Type', dtype={'ExtIdentifier':str, 'Description':str, 'Name':str})
-cobie_system_df = cobie_file.parse(sheet_name = 'System', dtype={'ExtIdentifier':str, 'Description':str, 'Name':str})
-cobie_component_df = cobie_file.parse(sheet_name = 'Component', dtype={'ExtIdentifier':str, 'Space':str, 'Description':str, 'Name':str})
 
 
 def decodeIfc(txt):

@@ -48,83 +48,182 @@ def convert_unicode_string(row, column_name):
     return decodeIfc(row[column_name])
 
 
+def decode_cobie(cobie_df):
+
+    columns_to_decode = ['Name', 'TypeName', 'Description']
+    for column_to_decode in columns_to_decode:
+
+        cobie_df[column_to_decode] = cobie_df.apply(
+            convert_unicode_string,
+            column_name=column_to_decode,
+            axis=1
+        )
+
+    return cobie_df
 
 
 # In[3]:
 
-cobie_type_df
+def combine_type_component(cobie_type_df, cobie_type_component):
+
+    cobie_type_df.rename(columns={
+        'Name':'TypeName',
+        'ExtObject':'TypeExtObject',
+        'ExtIdentifier':'TypeExtIdentifier',
+    }, inplace=True)
+
+    cobie_type_component = pd.merge(
+        cobie_component_df[[
+            'Name','TypeName', 'Space',
+            'ExtObject', 'ExtIdentifier', 'SerialNumber',
+        ]],
+        cobie_type_df[[
+            'TypeName', 'Category', 'Description',
+            'Manufacturer', 'ModelNumber',
+            'TypeExtObject', 'TypeExtIdentifier',
+        ]],
+        on='TypeName',
+        how='left',
+    )
+
+    return cobie_type_component
+
 
 
 # In[4]:
 
-cobie_system_df
+def combine_full_component_system(cobie_flat, cobie_system_df):
+
+    cobie_system_df.rename(columns={
+        'Name':'SystemName',
+        'Description':'SystemDescription',
+        'Category':'SystemCategory',
+        'ComponentNames':'Name',
+    }, inplace=True)
+    system_all = cobie_system_df.explode(column='Name')
+
+    cobie_flat = pd.merge(
+        cobie_type_component,
+        system_all[[
+            'SystemName', 'SystemDescription', 'SystemCategory',
+            'Name',
+        ]],
+        on='Name',
+        how='left',
+    )
+
+    cobie_flat = cobie_flat[[
+        'Name',
+        'TypeName',
+        'Description',
+        'Category',
+        'SystemName',
+        'SystemDescription',
+        'SystemCategory',
+        'Space',
+        'ExtObject',
+        'ExtIdentifier',
+        'SerialNumber',
+        'Manufacturer',
+        'ModelNumber',
+        'TypeExtObject',
+        'TypeExtIdentifier',
+    ]]
+
+    return cobie_flat
 
 
 
 # In[5]:
 
+def download_button(object_to_download, download_filename, button_text, pickle_it=False):
+    """
+    Generates a link to download the given object_to_download.
+    Params:
+    ------
+    object_to_download: The object to be downloaded.
+    download_filename (str): filename and extension of file. e.g. mydata.csv,
+    some_txt_output.txt download_link_text (str): Text to display for download
+    link.
+    button_text (str): Text to display on download button (e.g. 'click here to download file')
+    pickle_it (bool): If True, pickle file.
+    Returns:
+    -------
+    (str): the anchor tag to download object_to_download
+    Examples:
+    --------
+    download_link(your_df, 'YOUR_DF.csv', 'Click to download data!')
+    download_link(your_str, 'YOUR_STRING.txt', 'Click to download text!')
+    """
+    if pickle_it:
+        try:
+            object_to_download = pickle.dumps(object_to_download)
+        except pickle.PicklingError as e:
+            st.write(e)
+            return None
 
+    else:
+        if isinstance(object_to_download, bytes):
+            pass
+
+        elif isinstance(object_to_download, pd.DataFrame):
+            #object_to_download = object_to_download.to_csv(index=False)
+            towrite = io.BytesIO()
+            object_to_download = object_to_download.to_excel(
+                towrite,
+                encoding='utf-8',
+                index=False,
+                header=True,
+                na_rep=''
+            )
+            towrite.seek(0)
+
+        # Try JSON encode for everything else
+        else:
+            object_to_download = json.dumps(object_to_download)
+
+    try:
+        # some strings <-> bytes conversions necessary here
+        b64 = base64.b64encode(object_to_download.encode()).decode()
+
+    except AttributeError as e:
+        b64 = base64.b64encode(towrite.read()).decode()
+
+    button_uuid = str(uuid.uuid4()).replace('-', '')
+    button_id = re.sub('\d+', '', button_uuid)
+
+    custom_css = f"""
+        <style>
+            #{button_id} {{
+                display: inline-flex;
+                align-items: center;
+                justify-content: center;
+                background-color: rgb(255, 255, 255);
+                color: rgb(38, 39, 48);
+                padding: .25rem .75rem;
+                position: relative;
+                text-decoration: none;
+                border-radius: 4px;
+                border-width: 1px;
+                border-style: solid;
+                border-color: rgb(230, 234, 241);
+                border-image: initial;
+            }}
+            #{button_id}:hover {{
+                border-color: rgb(246, 51, 102);
+                color: rgb(246, 51, 102);
+            }}
+            #{button_id}:active {{
+                box-shadow: none;
+                background-color: rgb(246, 51, 102);
+                color: white;
+            }}
+        </style> """
+
+    dl_link = custom_css + f'<a download="{download_filename}" id="{button_id}" href="data:application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;base64,{b64}">{button_text}</a><br></br>'
+
+    return dl_link
 
 # In[7]:
 

@@ -153,69 +252,34 @@ cobie_flat = cobie_flat[[
 
 
 
 # In[ ]:
 
+cobie_file_button = st.text_input("Dropbox link to COBie file", key="cobie_file_button")
 
 # In[ ]:
 
-if
-JOIN rnc_taxonomy tax
-ON
-tax.id = precomputed.taxid
-WHERE
-tax.lineage LIKE 'cellular organisms; Bacteria; %%'
-AND precomputed.is_active = true -- exclude sequences without active cross-references
-AND rna_type = 'rRNA'
-'''
-test_query2 = '''
-SELECT
-upi, -- RNAcentral URS identifier
-taxid, -- NCBI taxid
-ac -- external accession
-FROM xref
-WHERE ac IN ('OTTHUMT00000106564.1', 'OTTHUMT00000416802.1')
-'''
-rows = run_query(project_specific_responsibility_query)
-raci = pd.read_sql_query(raci_query, get_connection(), params={'project_id':project_id})
-# test_data = pd.read_sql_query(test_query2, get_connection())
-
-# project_responsibilities = pd.read_sql_query(project_responsibility_query, db, params={'project_id':project_id})
-# responsibility = pd.read_sql_query(responsibility_query, db, params={'project_id':project_id})
-# project_responsibility_raci = pd.read_sql_query(project_responsibility_raci_query, db, params={'project_id':project_id})
-# project_organisaion = pd.read_sql_query(project_organisaion_query, db, params={'project_id':project_id})
-
-# st.dataframe(project_organisaion)
-st.dataframe(raci)
-
-create_template_table = st.text_input("test", key="test")
-
-if create_template_table:
-
-    download_table = st.session_state.test
-    # raci_table = create_raci_table(project_responsibilities, responsibility, project_responsibility_raci)
+if cobie_file_button:
+
+    cobie_file_path = st.session_state.cobie_file_button
+
+    if '=0' in cobie_file_path:
+
+        cobie_file_path = cobie_file_path.replace('=0', '=1')
+
+    cobie_file = pd.ExcelFile(cobie_file_path)
+    cobie_floor_df = cobie_file.parse(sheet_name = 'Floor', dtype={'ExtIdentifier':str, 'Name':str})
+    cobie_space_df = cobie_file.parse(sheet_name = 'Space', dtype={'ExtIdentifier':str, 'Name':str})
+    cobie_type_df = cobie_file.parse(sheet_name = 'Type', dtype={'ExtIdentifier':str, 'Description':str, 'Name':str})
+    cobie_system_df = cobie_file.parse(sheet_name = 'System', dtype={'ExtIdentifier':str, 'Description':str, 'Name':str})
+    cobie_component_df = cobie_file.parse(sheet_name = 'Component', dtype={'ExtIdentifier':str, 'Space':str, 'Description':str, 'Name':str})
+
+
+    cobie_type_component = combine_type_component(cobie_type_df, cobie_component_df)
+    cobie_flat = combine_full_component_system(cobie_type_component, cobie_system_df)
+    cobie_flat = decode_cobie(cobie_flat)
+
+    file_name = 'cobie_flat.xlsx'
+    download_button_str = download_button(cobie_flat, file_name, f'Click here to download {file_name}', pickle_it=False)
+    st.markdown(download_button_str, unsafe_allow_html=True)
 
 
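Note on the unchanged helpers: decodeIfc and convert_unicode_string appear in this diff only as context lines, so their bodies are not shown. As a rough sketch (an assumption, not the committed implementation), a decoder of this kind typically expands IFC \X2\...\X0\ escape sequences into Unicode characters, which is what lets decode_cobie clean up the Name, TypeName and Description columns:

    import re

    def decode_ifc_sketch(txt):
        # Hypothetical stand-in for decodeIfc: expand IFC \X2\<hex>\X0\ escapes,
        # where each 4-hex-digit group is one UTF-16 code unit,
        # e.g. 'Caf\X2\00E9\X0\' -> 'Café'.
        if not isinstance(txt, str):
            return txt

        def _expand(match):
            digits = match.group(1)
            return ''.join(chr(int(digits[i:i + 4], 16))
                           for i in range(0, len(digits), 4))

        return re.sub(r'\\X2\\([0-9A-Fa-f]+)\\X0\\', _expand, txt)

convert_unicode_string (the context line at old/new line 48 above) simply applies the decoder to one named column of a row, which is why decode_cobie can loop over the columns and call it through DataFrame.apply.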
requirements.txt
ADDED
@@ -0,0 +1,8 @@
+streamlit
+pandas
+numpy
+openpyxl
+simplejson
+# sqlalchemy
+# psycopg2-binary
+# https://www.dropbox.com/s/9yfqtw4qldma1kf/ifc-0.3.18h.tar.gz?dl=1