A-O98 committed
Commit 440eaa6 · 1 Parent(s): 2cdce84

new updates

Files changed (9)
  1. app.py +7 -7
  2. fields_asim.parquet +0 -0
  3. fields_dfr4.parquet +0 -0
  4. main.py +9 -8
  5. pag/add_field.py +1 -1
  6. pag/edit.py +87 -30
  7. pag/moniter.py +0 -561
  8. pag/monitor.py +2 -297
  9. test.py +0 -43
app.py CHANGED
@@ -3,8 +3,8 @@ import streamlit as st
3
  import authentication
4
  import streamlit as st
5
  from pag import add_field, edit, monitor
6
-
7
- # from pages import add_field, edit, moniter
8
  def authenticate_user():
9
  st.title("Welcome to :orange[Field Monitoring App]")
10
  st.markdown("""
@@ -28,22 +28,22 @@ def authenticate_user():
28
  def main():
29
 
30
  if "authenticated" not in st.session_state:
31
- st.session_state.authenticated = False
32
 
33
  if st.session_state.authenticated:
34
- st.sidebar.title(":blue[Field Management Options]")
35
  options = st.sidebar.radio("Choose an option:",
36
- ("Add Field", "Edit Fields/Add Field Info", "Monitor"))
37
 
38
  if options == "Add Field":
39
  st.title("Welcome to :orange[Field Monitoring App]")
40
 
41
  add_field.add_drawing()
42
 
43
- elif options == "Edit Fields/Add Field Info":
44
  st.title("Welcome to :orange[Field Monitoring App]")
45
  edit.edit_fields()
46
- elif options == "Monitor":
47
  st.title("Welcome to :orange[Field Monitoring App]")
48
  monitor.monitor_fields()
49
  else:
 
3
  import authentication
4
  import streamlit as st
5
  from pag import add_field, edit, monitor
6
+
7
+
8
  def authenticate_user():
9
  st.title("Welcome to :orange[Field Monitoring App]")
10
  st.markdown("""
 
28
  def main():
29
 
30
  if "authenticated" not in st.session_state:
31
+ st.session_state.authenticated = False
32
 
33
  if st.session_state.authenticated:
34
+ st.sidebar.title("Navigation")
35
  options = st.sidebar.radio("Choose an option:",
36
+ ("Add Field", "Manage Fields", "Monitor Fields"))
37
 
38
  if options == "Add Field":
39
  st.title("Welcome to :orange[Field Monitoring App]")
40
 
41
  add_field.add_drawing()
42
 
43
+ elif options == "Manage Fields":
44
  st.title("Welcome to :orange[Field Monitoring App]")
45
  edit.edit_fields()
46
+ elif options == "Monitor Fields":
47
  st.title("Welcome to :orange[Field Monitoring App]")
48
  monitor.monitor_fields()
49
  else:
fields_asim.parquet CHANGED
Binary files a/fields_asim.parquet and b/fields_asim.parquet differ
 
fields_dfr4.parquet DELETED
Binary file (8.54 kB)
 
main.py CHANGED
@@ -2,9 +2,9 @@
2
  import streamlit as st
3
  import authentication
4
  import streamlit as st
5
- from pag import add_field, edit, moniter
6
-
7
- # from pages import add_field, edit, moniter
8
  def authenticate_user():
9
  st.title("Welcome to :orange[Field Monitoring App]")
10
  st.markdown("""
@@ -12,6 +12,7 @@ def authenticate_user():
12
  .stSelectbox > div > div {cursor: pointer;}
13
  </style>
14
  """, unsafe_allow_html=True)
 
15
  if not st.session_state.authenticated:
16
  choice = st.selectbox("Interested? Sign up or log in if you have an account",options=["Home","Login","SignUp"])
17
 
@@ -28,24 +29,24 @@ def authenticate_user():
28
  def main():
29
 
30
  if "authenticated" not in st.session_state:
31
- st.session_state.authenticated = False
32
 
33
  if st.session_state.authenticated:
34
  st.sidebar.title("Navigation")
35
  options = st.sidebar.radio("Choose an option:",
36
- ("Add Field", "Edit", "Monitor"))
37
 
38
  if options == "Add Field":
39
  st.title("Welcome to :orange[Field Monitoring App]")
40
 
41
  add_field.add_drawing()
42
 
43
- elif options == "Edit":
44
  st.title("Welcome to :orange[Field Monitoring App]")
45
  edit.edit_fields()
46
- elif options == "Monitor":
47
  st.title("Welcome to :orange[Field Monitoring App]")
48
- moniter.monitor_fields()
49
  else:
50
  authenticate_user()
51
  if __name__ == "__main__":
 
2
  import streamlit as st
3
  import authentication
4
  import streamlit as st
5
+ from pag import add_field, edit, monitor
6
+
7
+
8
  def authenticate_user():
9
  st.title("Welcome to :orange[Field Monitoring App]")
10
  st.markdown("""
 
12
  .stSelectbox > div > div {cursor: pointer;}
13
  </style>
14
  """, unsafe_allow_html=True)
15
+
16
  if not st.session_state.authenticated:
17
  choice = st.selectbox("Interested? Sign up or log in if you have an account",options=["Home","Login","SignUp"])
18
 
 
29
  def main():
30
 
31
  if "authenticated" not in st.session_state:
32
+ st.session_state.authenticated = False
33
 
34
  if st.session_state.authenticated:
35
  st.sidebar.title("Navigation")
36
  options = st.sidebar.radio("Choose an option:",
37
+ ("Add Field", "Manage Fields", "Monitor Fields"))
38
 
39
  if options == "Add Field":
40
  st.title("Welcome to :orange[Field Monitoring App]")
41
 
42
  add_field.add_drawing()
43
 
44
+ elif options == "Manage Fields":
45
  st.title("Welcome to :orange[Field Monitoring App]")
46
  edit.edit_fields()
47
+ elif options == "Monitor Fields":
48
  st.title("Welcome to :orange[Field Monitoring App]")
49
+ monitor.monitor_fields()
50
  else:
51
  authenticate_user()
52
  if __name__ == "__main__":
pag/add_field.py CHANGED
@@ -54,7 +54,7 @@ def display_map_and_drawing_controls(m, center_start):
54
  zoom_start = 13
55
  if st.session_state['active_drawing'] is None:
56
  st.info("IMPORTANT: Click on the drawing to confirm the drawn field", icon="🚨")
57
- sat_basemap = utils.basemaps['Google Satellite']
58
  sat_basemap.add_to(m)
59
  folium.LayerControl().add_to(m)
60
  output = st_folium(m, center=center_start, zoom=zoom_start, key="new", width=800)
 
54
  zoom_start = 13
55
  if st.session_state['active_drawing'] is None:
56
  st.info("IMPORTANT: Click on the drawing to confirm the drawn field", icon="🚨")
57
+ sat_basemap = utils.basemaps['Google Satellite Hybrid'] # Change this line to use 'Google Satellite Hybrid'
58
  sat_basemap.add_to(m)
59
  folium.LayerControl().add_to(m)
60
  output = st_folium(m, center=center_start, zoom=zoom_start, key="new", width=800)
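
Note: the add_field.py change only swaps which key is looked up in utils.basemaps; the utils module itself is not part of this commit. As a rough sketch of what such a registry usually looks like (the tile URLs and dictionary layout below are assumptions for illustration, not code from this repository), both entries can be plain folium.TileLayer objects, so sat_basemap.add_to(m) keeps working unchanged:

import folium

# Hypothetical sketch of a basemap registry like utils.basemaps.
# lyrs=s serves plain satellite imagery; lyrs=y serves the hybrid
# (satellite + labels) variant selected by the new key.
basemaps = {
    'Google Satellite': folium.TileLayer(
        tiles='https://mt1.google.com/vt/lyrs=s&x={x}&y={y}&z={z}',
        attr='Google', name='Google Satellite', overlay=True, control=True,
    ),
    'Google Satellite Hybrid': folium.TileLayer(
        tiles='https://mt1.google.com/vt/lyrs=y&x={x}&y={y}&z={z}',
        attr='Google', name='Google Satellite Hybrid', overlay=True, control=True,
    ),
}
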
pag/edit.py CHANGED
@@ -5,10 +5,9 @@ import geopandas as gpd
5
  from streamlit_folium import st_folium, folium_static
6
  from authentication import greeting, check_password
7
  import folium
 
8
  import time
9
- def check_authentication():
10
- if not check_password():
11
- st.stop()
12
 
13
 
14
  def add_properties(df, col_name, value, field_name):
@@ -37,17 +36,24 @@ def read_custom_property():
37
  return custom_property_name, custom_property_value
38
 
39
 
40
-
41
-
42
-
43
  def edit_fields():
44
- current_user = greeting("Changed your mind? Edit , Add or Delete Fields easily")
45
- file_path = f"fields_{current_user}.parquet"
46
- if os.path.exists(file_path):
47
- gdf = gpd.read_parquet(file_path)
 
 
 
48
  else:
49
- st.info("No Fields Added Yet!")
50
  return
51
  st.info("Hover over the field to show the properties or check the Existing Fields List below")
52
  fields_map = gdf.explore()
53
  sat_basemap = utils.basemaps['Google Satellite']
@@ -56,32 +62,27 @@ def edit_fields():
56
  folium_static(fields_map, height=300, width=600)
57
 
58
  with st.expander("Existing Fields List", expanded=False):
 
59
  st.write(gdf)
60
 
61
  field_name = select_field(gdf)
62
  if field_name == "Select Field":
63
  st.info("No Field Selected Yet!")
64
-
65
  else:
66
- delete_edit = st.radio("Delete or Edit Field?", options=["View", "Edit", "Delete"], key="delete_edit", help="Select the operation to perform")
67
- if delete_edit == "View":
 
68
  field = gdf[gdf['name'] == field_name]
69
  st.write(field)
70
- elif delete_edit == "Delete":
71
- delete = st.button("Delete Field", key="delete", help="Click to Delete Field", type="primary", use_container_width=True)
72
- if delete:
73
- if len(gdf) == 1 and (gdf['name'] == field_name).all(): # Check if this is the only field left
74
- os.remove(file_path) # Delete the .parquet file if it's the last field
75
- st.success("All fields deleted. The data file has been removed.")
76
- time.sleep(0.3)
77
- st.rerun()
78
- else:
79
- gdf = gdf[gdf['name'] != field_name]
80
- gdf.to_parquet(file_path)
81
- st.success("Field Deleted Successfully!")
82
- time.sleep(0.3)
83
- st.rerun()
84
- else:
85
  no_input = True
86
  crop_type = read_crop_type()
87
  irrigation_type = read_irrigation_type()
@@ -103,8 +104,64 @@ def edit_fields():
103
  st.success("Field Information Updated Successfully!")
104
  st.info("Please Select View above to see the updated field information")
105
 
106
 
107
  if __name__ == '__main__':
108
- check_authentication()
109
 
110
  edit_fields()
 
5
  from streamlit_folium import st_folium, folium_static
6
  from authentication import greeting, check_password
7
  import folium
8
+ import json
9
  import time
10
+ import pandas as pd
 
 
11
 
12
 
13
  def add_properties(df, col_name, value, field_name):
 
36
  return custom_property_name, custom_property_value
37
 
38
 
 
 
 
39
  def edit_fields():
40
+ current_user = greeting("Manage your fields")
41
+ fields_file_path = f"fields_{current_user}.parquet"
42
+ history_file_path = f"history_{current_user}.csv"
43
+
44
+ # Load or initialize the GeoDataFrame for fields
45
+ if os.path.exists(fields_file_path):
46
+ gdf = gpd.read_parquet(fields_file_path)
47
  else:
48
+ st.info("No fields added yet!")
49
  return
50
+
51
+ # Load or initialize the DataFrame for field usage history
52
+ if os.path.exists(history_file_path):
53
+ history_df = pd.read_csv(history_file_path)
54
+ else:
55
+ history_df = pd.DataFrame(columns=['field_name', 'start_date', 'end_date', 'crop', 'irrigation_method'])
56
+
57
  st.info("Hover over the field to show the properties or check the Existing Fields List below")
58
  fields_map = gdf.explore()
59
  sat_basemap = utils.basemaps['Google Satellite']
 
62
  folium_static(fields_map, height=300, width=600)
63
 
64
  with st.expander("Existing Fields List", expanded=False):
65
+ # lis = [(f"Name:{gdf.iloc[i]['name']}",f"location: {gdf.iloc[i]['geometry']}" )for i in range(len(gdf))]
66
  st.write(gdf)
67
 
68
  field_name = select_field(gdf)
69
  if field_name == "Select Field":
70
  st.info("No Field Selected Yet!")
 
71
  else:
72
+ st.subheader(field_name)
73
+ option_menu = st.radio(f"Please add your {field_name} field information, historical data will help train our AI model", options=["View Field Info", "Add Field Information","Add Field Cultivation History", "Delete"], key="option_menu", help="Select the operation to perform")
74
+ if option_menu == "View Field Info":
75
  field = gdf[gdf['name'] == field_name]
76
  st.write(field)
77
+ # Deserialize the usage history for display
78
+ if len(history_df)>0:
79
+ st.write("Previous cultivation History:", history_df)
80
+ else:
81
+ st.subheader("No cultivation history added for this field.")
82
+
83
+ elif option_menu == "Add Field Information":
84
+
85
+
86
  no_input = True
87
  crop_type = read_crop_type()
88
  irrigation_type = read_irrigation_type()
 
104
  st.success("Field Information Updated Successfully!")
105
  st.info("Please Select View above to see the updated field information")
106
 
107
+ elif option_menu == "Add Field Cultivation History":
108
+ with st.form(key='history_form', clear_on_submit=True):
109
+ start_date = st.date_input("Cultivation Start Date", key=f'start_date')
110
+ end_date = st.date_input("Cultivation End Date", key=f'end_date')
111
+ crop_planted = st.selectbox("Type of Crop Planted", [' ', 'Wheat', 'Corn', 'Rice',"other"], index=0)
112
+
113
+ irrigation_method = st.selectbox("Irrigation Method Used", ['Rainfed', 'Irrigated', " "], index=2)
114
+ submit_history = st.form_submit_button("Submit Crop Cycle")
115
+ if submit_history:
116
+ # Check that the start date is before the end date
117
+ if start_date < end_date:
118
+ # Append new usage entry
119
+ new_history = {
120
+ 'field_name': field_name,
121
+ 'start_date': str(start_date),
122
+ 'end_date': str(end_date),
123
+ 'crop': crop_planted,
124
+ 'irrigation_method': irrigation_method
125
+ }
126
+ # Use concat instead of append
127
+ history_df = pd.concat([history_df, pd.DataFrame([new_history])], ignore_index=True)
128
+ history_df.to_csv(history_file_path, index=False)
129
+ st.success("Field usage history updated successfully!, fill the form again to add another cultivation history" )
130
+
131
+ else:
132
+ st.write("check the entered dates")
133
+
134
+ elif option_menu == "Delete":
135
+ option = st.selectbox("What do you want to delete", options=[f'Delete {field_name} Field', 'Delete a historical entry from the field'])
136
+
137
+ if option == f"Delete {field_name} Field" :
138
+ delete = st.button("Delete Entire Field", key="delete_field", help="Click to Delete Field", type="primary", use_container_width=True)
139
+ if delete:
140
+ if len(gdf) == 1 and (gdf['name'] == field_name).all(): # Check if this is the only field left
141
+ os.remove(fields_file_path) # Delete the .parquet file if it's the last field
142
+ os.remove(history_file_path)
143
+ st.success("All fields deleted. The data file has been removed.")
144
+ time.sleep(0.3)
145
+ st.rerun()
146
+ else:
147
+ gdf = gdf[gdf['name'] != field_name]
148
+ history_df = history_df[history_df["field_name"] != field_name ]
149
+ gdf.to_parquet(fields_file_path)
150
+ history_df.to_csv(history_file_path, index=False)
151
+ st.success("Field Deleted Successfully!")
152
+ time.sleep(0.3)
153
+ st.rerun()
154
+ elif option == "Delete a historical entry from the field":
155
+ # Allow the user to select which historical entry to delete
156
+ idx_history_to_delete = st.selectbox("Select a history to delete, select the index of the entry that you want to delete", options=history_df[history_df['field_name'] == field_name].index)
157
+ if st.button("Confirm Delete Historical Entry", key="delete_history", help="Click to Delete Entry", type="primary", use_container_width=True):
158
+ history_df.drop(labels=0, axis=0, index=None, columns=None, level=None, inplace=True, errors='raise')
159
+ history_df.to_csv(history_file_path, index=False)
160
+
161
+ st.success("Entry Deleted Successfully!")
162
+ time.sleep(0.3)
163
+ st.rerun()
164
 
165
  if __name__ == '__main__':
 
166
 
167
  edit_fields()
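
Note: the cultivation-history feature added to pag/edit.py reduces to a load-append-save cycle on a per-user CSV, using pd.concat because DataFrame.append was removed in pandas 2.0. A minimal standalone sketch of that pattern follows (the file name and field values are made up for illustration; the column names match the diff):

import os
import pandas as pd

history_file_path = "history_demo_user.csv"  # per-user CSV, as in the diff

# Load the existing history or start an empty one with the diff's columns.
if os.path.exists(history_file_path):
    history_df = pd.read_csv(history_file_path)
else:
    history_df = pd.DataFrame(
        columns=['field_name', 'start_date', 'end_date', 'crop', 'irrigation_method']
    )

# Example entry; the values are illustrative only.
new_history = {
    'field_name': 'North Plot',
    'start_date': '2024-03-01',
    'end_date': '2024-07-15',
    'crop': 'Wheat',
    'irrigation_method': 'Irrigated',
}

# Append with pd.concat (DataFrame.append no longer exists) and persist.
history_df = pd.concat([history_df, pd.DataFrame([new_history])], ignore_index=True)
history_df.to_csv(history_file_path, index=False)
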
pag/moniter.py DELETED
@@ -1,561 +0,0 @@
1
- import os
2
- import utils
3
- import streamlit as st
4
- import geopandas as gpd
5
- from streamlit_folium import st_folium, folium_static
6
- from authentication import greeting, check_password
7
- import folium
8
- from senHub import SenHub
9
- from datetime import datetime
10
- from sentinelhub import SHConfig, MimeType
11
- import requests
12
- import process
13
- import joblib
14
- from zipfile import ZipFile
15
- import matplotlib.pyplot as plt
16
- from plotly.subplots import make_subplots
17
- import plotly.graph_objects as go
18
-
19
- def check_authentication():
20
- if not check_password():
21
- st.stop()
22
-
23
-
24
-
25
- config = SHConfig()
26
- config.instance_id = '6c220beb-90c4-4131-b658-10cddd8d97b9'
27
- config.sh_client_id = '17e7c154-7f2d-4139-b1af-cef762385079'
28
- config.sh_client_secret = 'KvbQMKZB85ZWEgWuxqiWIVEvTAQEfoF9'
29
-
30
-
31
- def select_field(gdf):
32
- names = gdf['name'].tolist()
33
- names.append("Select Field")
34
- field_name = st.selectbox("Select Field", options=names, key="field_name_monitor", help="Select the field to edit", index=len(names)-1)
35
- return field_name
36
-
37
-
38
- def calculate_bbox(df, field):
39
- bbox = df.loc[df['name'] == field].bounds
40
- r = bbox.iloc[0]
41
- return [r.minx, r.miny, r.maxx, r.maxy]
42
-
43
- def get_available_dates_for_field(df, field, year, start_date='', end_date=''):
44
- bbox = calculate_bbox(df, field)
45
- token = SenHub(config).token
46
- headers = utils.get_bearer_token_headers(token)
47
- if start_date == '' or end_date == '':
48
- start_date = f'{year}-01-01'
49
- end_date = f'{year}-12-31'
50
- data = f'{{ "collections": [ "sentinel-2-l2a" ], "datetime": "{start_date}T00:00:00Z/{end_date}T23:59:59Z", "bbox": {bbox}, "limit": 100, "distinct": "date" }}'
51
- response = requests.post('https://services.sentinel-hub.com/api/v1/catalog/search', headers=headers, data=data)
52
- try:
53
- features = response.json()['features']
54
- except:
55
- print(response.json())
56
- features = []
57
- return features
58
-
59
- @st.cache_data
60
- def get_and_cache_available_dates(_df, field, year, start_date, end_date):
61
- dates = get_available_dates_for_field(_df, field, year, start_date, end_date)
62
- print(f'Caching Dates for {field}')
63
- return dates
64
-
65
-
66
-
67
-
68
- def get_cuarted_df_for_field(df, field, date, metric, clientName):
69
- curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
70
- if curated_date_path is not None:
71
- curated_df = gpd.read_file(curated_date_path)
72
- else:
73
- process.Download_image_in_given_date(clientName, metric, df, field, date)
74
- process.mask_downladed_image(clientName, metric, df, field, date)
75
- process.convert_maske_image_to_geodataframe(clientName, metric, df, field, date, df.crs)
76
- curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
77
- curated_df = gpd.read_file(curated_date_path)
78
- return curated_df
79
-
80
-
81
-
82
-
83
-
84
-
85
-
86
-
87
- def get_cuarted_df_for_field(df, field, date, metric, clientName):
88
- curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
89
- if curated_date_path is not None:
90
- curated_df = gpd.read_file(curated_date_path)
91
- else:
92
- process.Download_image_in_given_date(clientName, metric, df, field, date)
93
- process.mask_downladed_image(clientName, metric, df, field, date)
94
- process.convert_maske_image_to_geodataframe(clientName, metric, df, field, date, df.crs)
95
- curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
96
- curated_df = gpd.read_file(curated_date_path)
97
- return curated_df
98
-
99
- def track(metric, field_name, src_df, client_name):
100
-
101
- dates = []
102
- date = -1
103
- if 'dates' not in st.session_state:
104
- st.session_state['dates'] = dates
105
- else:
106
- dates = st.session_state['dates']
107
- if 'date' not in st.session_state:
108
- st.session_state['date'] = date
109
- else:
110
- date = st.session_state['date']
111
-
112
- # Give the user the option to select year, start date and end date
113
- # with st.expander('Select Year, Start Date and End Date'):
114
- # # Get the year
115
- # years = [f'20{i}' for i in range(22, 25)]
116
- # year = st.selectbox('Select Year: ', years, index=len(years)-2, key=f'Select Year Dropdown Menu - {metric}')
117
-
118
- # # Set the min, max and default values for start and end dates
119
- # min_val = f'{year}-01-01'
120
- # max_val = f'{year}-12-31'
121
- # default_val = f'{year}-11-01'
122
- # min_val = datetime.strptime(min_val, '%Y-%m-%d')
123
- # max_val = datetime.strptime(max_val, '%Y-%m-%d')
124
- # default_val = datetime.strptime(default_val, '%Y-%m-%d')
125
-
126
- # # Get the start and end dates
127
- # start_date = st.date_input('Start Date', value=default_val, min_value=min_val, max_value=max_val, key=f'Start Date - {metric}')
128
- # end_date = st.date_input('End Date', value=max_val, min_value=min_val, max_value=max_val, key=f'End Date - {metric}')
129
-
130
-
131
- # Get the dates with available data for that field when the user clicks the button
132
- # get_dates_button = st.button(f'Get Dates for Field {field_name} (Field ID: {field_name}) in {year} (from {start_date} to {end_date})',
133
- # key=f'Get Dates Button - {metric}',
134
- # help='Click to get the dates with available data for the selected field',
135
- # use_container_width=True, type='primary')
136
- # if get_dates_button:
137
- if True:
138
- start_date = '2024-01-01'
139
- today = datetime.today()
140
- end_date = today.strftime('%Y-%m-%d')
141
- year = '2024'
142
-
143
- dates = get_and_cache_available_dates(src_df, field_name, year, start_date, end_date)
144
- # Add None to the end of the list to be used as a default value
145
- # dates.append(-1)
146
- #sort the dates from earliest to today
147
- dates = sorted(dates)
148
-
149
- #Add the dates to the session state
150
- st.session_state['dates'] = dates
151
-
152
- # Display the dropdown menu
153
- if len(dates) > 0:
154
- date = st.selectbox('Select Observation Date: ', dates, index=len(dates)-1, key=f'Select Date Dropdown Menu - {metric}')
155
- if date != -1:
156
- st.write('You selected:', date)
157
- #Add the date to the session state
158
- st.session_state['date'] = date
159
- else:
160
- st.write('Please Select A Date')
161
- else:
162
- st.info('No dates available for the selected field and dates range, select a different range or click the button to fetch the dates again')
163
-
164
-
165
- st.markdown('---')
166
- st.header('Show Field Data')
167
-
168
- # If a field and a date are selected, display the field data
169
- if date != -1:
170
-
171
- # Get the field data at the selected date
172
- with st.spinner('Loading Field Data...'):
173
- # Get the metric data and cloud cover data for the selected field and date
174
- metric_data = get_cuarted_df_for_field(src_df, field_name, date, metric, client_name)
175
- cloud_cover_data = get_cuarted_df_for_field(src_df, field_name, date, 'CLP', client_name)
176
-
177
- #Merge the metric and cloud cover data on the geometry column
178
- field_data = metric_data.merge(cloud_cover_data, on='geometry')
179
-
180
- # Display the field data
181
- st.write(f'Field Data for {field_name} (Field ID: {field_name}) on {date}')
182
- st.write(field_data.head(2))
183
-
184
- #Get Avarage Cloud Cover
185
- avg_clp = field_data[f'CLP_{date}'].mean() *100
186
-
187
- # If the avarage cloud cover is greater than 80%, display a warning message
188
- if avg_clp > 80:
189
- st.warning(f'⚠️ The Avarage Cloud Cover is {avg_clp}%')
190
- st.info('Please Select A Different Date')
191
-
192
- ## Generate the field data Map ##
193
-
194
- #Title, Colormap and Legend
195
- title = f'{metric} for selected field {field_name} (Field ID: {field_name}) in {date}'
196
- cmap = 'RdYlGn'
197
-
198
- # Create a map of the field data
199
- field_data_map = field_data.explore(
200
- column=f'{metric}_{date}',
201
- cmap=cmap,
202
- legend=True,
203
- vmin=0,
204
- vmax=1,
205
- marker_type='circle', marker_kwds={'radius':5.3, 'fill':True})
206
-
207
- # Add Google Satellite as a base map
208
- google_map = utils.basemaps['Google Satellite']
209
- google_map.add_to(field_data_map)
210
-
211
- # Display the map
212
- st_folium(field_data_map, width = 725, key=f'Field Data Map - {metric}')
213
-
214
-
215
- #Dwonload Links
216
-
217
- # If the field data is not empty, display the download links
218
- if len(field_data) > 0:
219
- # Create two columns for the download links
220
- download_as_shp_col, download_as_tiff_col = st.columns(2)
221
-
222
- # Create a shapefile of the field data and add a download link
223
- with download_as_shp_col:
224
-
225
- #Set the shapefile name and path based on the field id, metric and date
226
- extension = 'shp'
227
- shapefilename = f"{field_name}_{metric}_{date}.{extension}"
228
- path = f'./shapefiles/{field_name}/{metric}/{extension}'
229
-
230
- # Create the target directory if it doesn't exist
231
- os.makedirs(path, exist_ok=True)
232
-
233
- # Save the field data as a shapefile
234
- field_data.to_file(f'{path}/{shapefilename}')
235
-
236
- # Create a zip file of the shapefile
237
- files = []
238
- for i in os.listdir(path):
239
- if os.path.isfile(os.path.join(path,i)):
240
- if i[0:len(shapefilename)] == shapefilename:
241
- files.append(os.path.join(path,i))
242
- zipFileName = f'{path}/{field_name}_{metric}_{date}.zip'
243
- zipObj = ZipFile(zipFileName, 'w')
244
- for file in files:
245
- zipObj.write(file)
246
- zipObj.close()
247
-
248
- # Add a download link for the zip file
249
- with open(zipFileName, 'rb') as f:
250
- st.download_button('Download as ShapeFile', f,file_name=zipFileName)
251
-
252
- # Get the tiff file path and create a download link
253
- with download_as_tiff_col:
254
- #get the tiff file path
255
- tiff_path = utils.get_masked_location_img_path(client_name, metric, date, field_name)
256
- # Add a download link for the tiff file
257
- donwnload_filename = f'{metric}_{field_name}_{date}.tiff'
258
- with open(tiff_path, 'rb') as f:
259
- st.download_button('Download as Tiff File', f,file_name=donwnload_filename)
260
-
261
- else:
262
- st.info('Please Select A Field and A Date')
263
-
264
-
265
- # st.markdown('---')
266
- # st.header('Show Historic Averages')
267
-
268
-
269
- # #Let the user select the year, start date and end date
270
- # with st.expander('Select Year, Start Date and End Date'):
271
- # # Get the year
272
- # years = [f'20{i}' for i in range(22, 25)]
273
- # year = st.selectbox('Select Year: ', years, index=len(years)-2, key=f'Select Year Dropdown Menu - {metric}- Historic Averages')
274
-
275
- # # Set the start and end dates to the first and last dates of the year
276
- # start_date = f'{year}-01-01'
277
- # end_date = f'{year}-12-31'
278
-
279
- # # Get the dates for historic averages
280
- # historic_avarages_dates_for_field = get_and_cache_available_dates(src_df, field_name, year, start_date, end_date)
281
-
282
- # # Convert the dates to datetime objects and sort them ascendingly then convert them back to strings
283
- # historic_avarages_dates_for_field = [datetime.strptime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
284
- # historic_avarages_dates_for_field.sort()
285
- # historic_avarages_dates_for_field = [datetime.strftime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
286
-
287
- # # Get the number of dates
288
- # num_historic_dates = len(historic_avarages_dates_for_field)
289
- # st.write(f' Found {num_historic_dates} dates for field {field_name} in {year} (from {start_date} to {end_date})')
290
-
291
- # # Display the historic averages when the user clicks the button
292
- # display_historic_avgs_button = st.button(f'Display Historic Averages for Field {field_name} (Field ID: {field_name}) in {year} (from {start_date} to {end_date})',
293
- # key=f'Display Historic Averages Button - {metric}',
294
- # help='Click to display the historic averages for the selected field',
295
- # use_container_width=True, type='primary')
296
-
297
- # # If the button is clicked, display the historic averages
298
- # if display_historic_avgs_button:
299
-
300
- # #Initlize the historic averages cache dir and file path
301
- # historic_avarages_cache_dir = './historic_avarages_cache'
302
- # historic_avarages_cache_path = f'{historic_avarages_cache_dir}/historic_avarages_cache.joblib'
303
- # historic_avarages_cache_clp_path = f'{historic_avarages_cache_dir}/historic_avarages_cache_clp.joblib'
304
-
305
- # # Load the historic averages cache if it exists, else create it
306
- # if os.path.exists(historic_avarages_cache_path):
307
- # historic_avarages_cache = joblib.load(historic_avarages_cache_path)
308
- # else:
309
- # os.makedirs(historic_avarages_cache_dir, exist_ok=True)
310
- # joblib.dump({}, historic_avarages_cache_path)
311
- # historic_avarages_cache = joblib.load(historic_avarages_cache_path)
312
- # if os.path.exists(historic_avarages_cache_clp_path):
313
- # historic_avarages_cache_clp = joblib.load(historic_avarages_cache_clp_path)
314
- # else:
315
- # os.makedirs(historic_avarages_cache_dir, exist_ok=True)
316
- # joblib.dump({}, historic_avarages_cache_clp_path)
317
- # historic_avarages_cache_clp = joblib.load(historic_avarages_cache_clp_path)
318
-
319
- # found_in_cache = False
320
- # if client_name not in historic_avarages_cache:
321
- # historic_avarages_cache[client_name] = {}
322
- # if metric not in historic_avarages_cache[client_name]:
323
- # historic_avarages_cache[client_name][metric] = {}
324
- # if field_name not in historic_avarages_cache[client_name][metric]:
325
- # historic_avarages_cache[client_name][metric][field_name] = {}
326
- # if year not in historic_avarages_cache[client_name][metric][field_name]:
327
- # historic_avarages_cache[client_name][metric][field_name][year] = {}
328
- # if len(historic_avarages_cache[client_name][metric][field_name][year]) > 0:
329
- # found_in_cache = True
330
-
331
-
332
- # #Check if the field and year are in the cache_clp for the current metric and client
333
- # found_in_cache_clp = False
334
- # if client_name not in historic_avarages_cache_clp:
335
- # historic_avarages_cache_clp[client_name] = {}
336
- # if 'CLP' not in historic_avarages_cache_clp[client_name]:
337
- # historic_avarages_cache_clp[client_name]['CLP'] = {}
338
- # if field_name not in historic_avarages_cache_clp[client_name]['CLP']:
339
- # historic_avarages_cache_clp[client_name]['CLP'][field_name] = {}
340
- # if year not in historic_avarages_cache_clp[client_name]['CLP'][field_name]:
341
- # historic_avarages_cache_clp[client_name]['CLP'][field_name][year] = {}
342
- # if len(historic_avarages_cache_clp[client_name]['CLP'][field_name][year]) > 0:
343
- # found_in_cache_clp = True
344
-
345
-
346
- # # If Found in cache, get the historic averages from the cache
347
- # if found_in_cache and found_in_cache_clp:
348
- # st.info('Found Historic Averages in Cache')
349
- # historic_avarages = historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages']
350
- # historic_avarages_dates = historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages_dates']
351
- # historic_avarages_clp = historic_avarages_cache_clp[client_name]['CLP'][field_name][year]['historic_avarages_clp']
352
-
353
- # # Else, calculate the historic averages and add them to the cache
354
- # else:
355
- # st.info('Calculating Historic Averages...')
356
-
357
-
358
- # #Empty lists for the historic averages , dates and cloud cover
359
- # historic_avarages = []
360
- # historic_avarages_dates = []
361
- # historic_avarages_clp = []
362
-
363
- # # Get the historic averages
364
- # dates_for_field_bar = st.progress(0)
365
- # with st.spinner('Calculating Historic Averages...'):
366
- # with st.empty():
367
- # for i in range(num_historic_dates):
368
- # # Get the historic average for the current date
369
- # current_date = historic_avarages_dates_for_field[i]
370
- # current_df = get_cuarted_df_for_field(src_df, field_name, current_date, metric, client_name)
371
- # current_df_clp = get_cuarted_df_for_field(src_df, field_name, current_date, 'CLP', client_name)
372
- # current_avg = current_df[f'{metric}_{current_date}'].mean()
373
- # current_avg_clp = current_df_clp[f'CLP_{current_date}'].mean()
374
- # # Add the historic average and date to the lists
375
- # historic_avarages.append(current_avg)
376
- # historic_avarages_dates.append(current_date)
377
- # historic_avarages_clp.append(current_avg_clp)
378
- # # Update the progress bar
379
- # dates_for_field_bar.progress((i + 1)/(num_historic_dates))
380
-
381
- # # Create a plot of the historic averages with the cloud cover as dashed line and dates as x axis (rotated 90 degrees when needed)
382
- # fig, ax = plt.subplots(figsize=(5, 3))
383
-
384
- # # Set the x axis ticks and labels
385
- # x = historic_avarages_dates
386
- # x_ticks = [i for i in range(len(x))]
387
- # ax.set_xticks(x_ticks)
388
-
389
- # #Set rotation to 90 degrees if the number of dates is greater than 10
390
- # rot = 0 if len(x) < 10 else 90
391
- # ax.set_xticklabels(x, rotation=rot)
392
-
393
- # # Set the y axis ticks and labels
394
- # y1 = historic_avarages
395
- # y2 = historic_avarages_clp
396
- # y_ticks = [i/10 for i in range(11)]
397
- # ax.set_yticks(y_ticks)
398
- # ax.set_yticklabels(y_ticks)
399
-
400
- # # Plot the historic averages and cloud cover
401
- # ax.plot(x_ticks, y1, label=f'{metric} Historic Averages')
402
- # ax.plot(x_ticks, y2, '--', label='Cloud Cover')
403
- # ax.legend()
404
-
405
- # # Set the title and axis labels
406
- # ax.set_title(f'{metric} Historic Averages for {field_name} (Field ID: {field_name}) in {year}')
407
- # ax.set_xlabel('Date')
408
- # ax.set_ylabel(f'{metric} Historic Averages')
409
-
410
- # # Display the plot
411
- # st.pyplot(fig, use_container_width=True)
412
-
413
- # # Add the historic averages to the cache
414
- # historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages'] = historic_avarages
415
- # historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages_dates'] = historic_avarages_dates
416
- # historic_avarages_cache_clp[client_name]['CLP'][field_name][year]['historic_avarages_clp'] = historic_avarages_clp
417
- # # Save the cache
418
- # joblib.dump(historic_avarages_cache, historic_avarages_cache_path)
419
- # joblib.dump(historic_avarages_cache_clp, historic_avarages_cache_clp_path)
420
- # # Tell the user that the historic averages are saved in the cache
421
- # st.info('Historic Averages Saved in Cache')
422
- # st.write(f'Cache Path: {historic_avarages_cache_path}')
423
- # st.write(f'Cache CLP Path: {historic_avarages_cache_clp_path}')
424
-
425
-
426
- # # Display the historic averages in nice plotly plot
427
- # fig = make_subplots(specs=[[{"secondary_y": True}]])
428
-
429
- # # Add the historic averages to the plot
430
- # fig.add_trace(
431
- # go.Scatter(x=historic_avarages_dates, y=historic_avarages, name=f'{metric} Historic Averages'),
432
- # secondary_y=False,
433
- # )
434
-
435
- # # Add the cloud cover to the plot
436
- # fig.add_trace(
437
- # go.Scatter(x=historic_avarages_dates, y=historic_avarages_clp, name='Cloud Cover'),
438
- # secondary_y=True,
439
- # )
440
-
441
- # # Set the title and axis labels
442
- # fig.update_layout(title_text=f'{metric} Historic Averages for {field_name} (Field ID: {field_name}) in {year}')
443
- # fig.update_xaxes(title_text='Date')
444
- # fig.update_yaxes(title_text=f'{metric} Historic Averages', secondary_y=False)
445
- # fig.update_yaxes(title_text='Cloud Cover', secondary_y=True)
446
-
447
- # # Display the plot
448
- # st.plotly_chart(fig)
449
-
450
-
451
- # st.markdown('---')
452
- # st.header('Show Historic GIF')
453
-
454
-
455
- # #Let the user select the year, start date and end date of the GIF
456
- # with st.expander('Select Year, Start Date and End Date of the GIF'):
457
- # # Get the year
458
- # years = [f'20{i}' for i in range(16, 23)]
459
- # year = st.selectbox('Select Year: ', years, index=len(years)-2, key=f'Select Year Dropdown Menu - {metric}- Historic Averages GIF')
460
-
461
- # # Set the start and end dates to the first and last dates of the year
462
- # start_date = f'{year}-01-01'
463
- # end_date = f'{year}-12-31'
464
-
465
- # # Get the dates for historic GIF
466
- # historic_avarages_dates_for_field = get_and_cache_available_dates(src_df, field_name, year, start_date, end_date)
467
-
468
- # # Convert the dates to datetime objects and sort them ascendingly then convert them back to strings
469
- # historic_avarages_dates_for_field = [datetime.strptime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
470
- # historic_avarages_dates_for_field.sort()
471
- # historic_avarages_dates_for_field = [datetime.strftime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
472
-
473
- # # Get the number of dates
474
- # num_historic_dates = len(historic_avarages_dates_for_field)
475
- # st.write(f' Found {num_historic_dates} dates for field {field_name} in {year} (from {start_date} to {end_date})')
476
-
477
- # # Display the historic GIF when the user clicks the button
478
- # display_historic_GIF_button = st.button(f'Display Historic GIF for Field {field_name} (Field ID: {field_name}) in {year} (from {start_date} to {end_date})',
479
- # key=f'Display Historic GIF Button - {metric}',
480
- # help='Click to display the historic GIF for the selected field',
481
- # use_container_width=True, type='primary')
482
-
483
- # # If the button is clicked, display the historic GIF
484
- # if display_historic_GIF_button:
485
-
486
- # #Initlize the historic GIF imgs and dates
487
- # st.info('Generating Historic GIF...')
488
- # historic_imgs = []
489
- # historic_imgs_dates = []
490
-
491
- # # Gen the historic GIF
492
- # dates_for_field_bar = st.progress(0)
493
- # with st.spinner('Generating Historic GIF...'):
494
- # with st.empty():
495
- # for i in range(num_historic_dates):
496
- # current_date = historic_avarages_dates_for_field[i]
497
- # current_df = get_cuarted_df_for_field(src_df, field_name, current_date, metric, client_name)
498
- # historic_imgs.append(current_df)
499
- # historic_imgs_dates.append(current_date)
500
- # dates_for_field_bar.progress((i + 1)/(num_historic_dates))
501
-
502
- # # Create a fig of the historic Img
503
- # fig, ax = plt.subplots(figsize=(10, 5))
504
-
505
- # # Get the current img
506
- # current_df_lat_lon = utils.add_lat_lon_to_gdf_from_geometry(current_df)
507
- # current_img = utils.gdf_column_to_one_band_array(current_df_lat_lon, f'{metric}_{current_date}')
508
-
509
- # # Plot the historic Img
510
- # title = f'{metric} for selected field {field_name} (Field ID: {field_name}) in {current_date}'
511
- # ax.imshow(current_img)
512
- # ax.set_title(title)
513
-
514
- # # Display the plot
515
- # st.pyplot(fig)
516
-
517
- # # Create the historic GIF
518
- # historic_GIF_name = f'{metric}_{field_name}_{year}.gif'
519
- # st.write('Creating Historic GIF...', historic_GIF_name)
520
-
521
-
522
- def monitor_fields():
523
- current_user = greeting("Let's take a look how these fields are doing")
524
- if os.path.exists(f"fields_{current_user}.parquet"):
525
- gdf = gpd.read_parquet(f"fields_{current_user}.parquet")
526
- else:
527
- st.info("No Fields Added Yet!")
528
- return
529
- # st.info("Hover over the field to show the properties or check the Existing Fields List below")
530
- # fields_map = gdf.explore()
531
- # sat_basemap = utils.basemaps['Google Satellite']
532
- # sat_basemap.add_to(fields_map)
533
- # folium.LayerControl().add_to(fields_map)
534
- # # output = st_folium(fields_map, key="edit_map", height=300, width=600)
535
- # folium_static(fields_map, height=300, width=600)
536
-
537
- with st.expander("Existing Fields List", expanded=False):
538
- st.write(gdf)
539
-
540
- field_name = select_field(gdf)
541
- if field_name == "Select Field":
542
- st.info("No Field Selected Yet!")
543
-
544
- else:
545
- with st.expander("Metrics Explanation", expanded=False):
546
- st.write("NDVI: Normalized Difference Vegetation Index, Mainly used to monitor the health of vegetation")
547
- st.write("LAI: Leaf Area Index, Mainly used to monitor the productivity of vegetation")
548
- st.write("CAB: Chlorophyll Absorption in the Blue band, Mainly used to monitor the chlorophyll content in vegetation")
549
- st.write("NDMI: Normalized Difference Moisture Index, Mainly used to monitor the moisture content in vegetation")
550
- st.success("More metrics and analysis features will be added soon")
551
- metric = st.radio("Select Metric to Monitor", ["NDVI", "LAI", "CAB", "NDMI"], key="metric", index=0, help="Select the metric to monitor")
552
- st.write(f"Monitoring {metric} for {field_name}")
553
-
554
- track(metric, field_name, gdf, current_user)
555
-
556
-
557
-
558
-
559
- if __name__ == '__main__':
560
- check_authentication()
561
- monitor_fields()
pag/monitor.py CHANGED
@@ -78,12 +78,6 @@ def get_cuarted_df_for_field(df, field, date, metric, clientName):
78
  return curated_df
79
 
80
 
81
-
82
-
83
-
84
-
85
-
86
-
87
  def get_cuarted_df_for_field(df, field, date, metric, clientName):
88
  curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
89
  if curated_date_path is not None:
@@ -109,31 +103,6 @@ def track(metric, field_name, src_df, client_name):
109
  else:
110
  date = st.session_state['date']
111
 
112
- # Give the user the option to select year, start date and end date
113
- # with st.expander('Select Year, Start Date and End Date'):
114
- # # Get the year
115
- # years = [f'20{i}' for i in range(22, 25)]
116
- # year = st.selectbox('Select Year: ', years, index=len(years)-2, key=f'Select Year Dropdown Menu - {metric}')
117
-
118
- # # Set the min, max and default values for start and end dates
119
- # min_val = f'{year}-01-01'
120
- # max_val = f'{year}-12-31'
121
- # default_val = f'{year}-11-01'
122
- # min_val = datetime.strptime(min_val, '%Y-%m-%d')
123
- # max_val = datetime.strptime(max_val, '%Y-%m-%d')
124
- # default_val = datetime.strptime(default_val, '%Y-%m-%d')
125
-
126
- # # Get the start and end dates
127
- # start_date = st.date_input('Start Date', value=default_val, min_value=min_val, max_value=max_val, key=f'Start Date - {metric}')
128
- # end_date = st.date_input('End Date', value=max_val, min_value=min_val, max_value=max_val, key=f'End Date - {metric}')
129
-
130
-
131
- # Get the dates with available data for that field when the user clicks the button
132
- # get_dates_button = st.button(f'Get Dates for Field {field_name} (Field ID: {field_name}) in {year} (from {start_date} to {end_date})',
133
- # key=f'Get Dates Button - {metric}',
134
- # help='Click to get the dates with available data for the selected field',
135
- # use_container_width=True, type='primary')
136
- # if get_dates_button:
137
  if True:
138
  start_date = '2024-01-01'
139
  today = datetime.today()
@@ -142,7 +111,6 @@ def track(metric, field_name, src_df, client_name):
142
 
143
  dates = get_and_cache_available_dates(src_df, field_name, year, start_date, end_date)
144
  # Add None to the end of the list to be used as a default value
145
- # dates.append(-1)
146
  #sort the dates from earliest to today
147
  dates = sorted(dates)
148
 
@@ -260,264 +228,7 @@ def track(metric, field_name, src_df, client_name):
260
 
261
  else:
262
  st.info('Please Select A Field and A Date')
263
-
264
-
265
- # st.markdown('---')
266
- # st.header('Show Historic Averages')
267
-
268
-
269
- # #Let the user select the year, start date and end date
270
- # with st.expander('Select Year, Start Date and End Date'):
271
- # # Get the year
272
- # years = [f'20{i}' for i in range(22, 25)]
273
- # year = st.selectbox('Select Year: ', years, index=len(years)-2, key=f'Select Year Dropdown Menu - {metric}- Historic Averages')
274
-
275
- # # Set the start and end dates to the first and last dates of the year
276
- # start_date = f'{year}-01-01'
277
- # end_date = f'{year}-12-31'
278
-
279
- # # Get the dates for historic averages
280
- # historic_avarages_dates_for_field = get_and_cache_available_dates(src_df, field_name, year, start_date, end_date)
281
-
282
- # # Convert the dates to datetime objects and sort them ascendingly then convert them back to strings
283
- # historic_avarages_dates_for_field = [datetime.strptime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
284
- # historic_avarages_dates_for_field.sort()
285
- # historic_avarages_dates_for_field = [datetime.strftime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
286
-
287
- # # Get the number of dates
288
- # num_historic_dates = len(historic_avarages_dates_for_field)
289
- # st.write(f' Found {num_historic_dates} dates for field {field_name} in {year} (from {start_date} to {end_date})')
290
-
291
- # # Display the historic averages when the user clicks the button
292
- # display_historic_avgs_button = st.button(f'Display Historic Averages for Field {field_name} (Field ID: {field_name}) in {year} (from {start_date} to {end_date})',
293
- # key=f'Display Historic Averages Button - {metric}',
294
- # help='Click to display the historic averages for the selected field',
295
- # use_container_width=True, type='primary')
296
-
297
- # # If the button is clicked, display the historic averages
298
- # if display_historic_avgs_button:
299
-
300
- # #Initlize the historic averages cache dir and file path
301
- # historic_avarages_cache_dir = './historic_avarages_cache'
302
- # historic_avarages_cache_path = f'{historic_avarages_cache_dir}/historic_avarages_cache.joblib'
303
- # historic_avarages_cache_clp_path = f'{historic_avarages_cache_dir}/historic_avarages_cache_clp.joblib'
304
-
305
- # # Load the historic averages cache if it exists, else create it
306
- # if os.path.exists(historic_avarages_cache_path):
307
- # historic_avarages_cache = joblib.load(historic_avarages_cache_path)
308
- # else:
309
- # os.makedirs(historic_avarages_cache_dir, exist_ok=True)
310
- # joblib.dump({}, historic_avarages_cache_path)
311
- # historic_avarages_cache = joblib.load(historic_avarages_cache_path)
312
- # if os.path.exists(historic_avarages_cache_clp_path):
313
- # historic_avarages_cache_clp = joblib.load(historic_avarages_cache_clp_path)
314
- # else:
315
- # os.makedirs(historic_avarages_cache_dir, exist_ok=True)
316
- # joblib.dump({}, historic_avarages_cache_clp_path)
317
- # historic_avarages_cache_clp = joblib.load(historic_avarages_cache_clp_path)
318
-
319
- # found_in_cache = False
320
- # if client_name not in historic_avarages_cache:
321
- # historic_avarages_cache[client_name] = {}
322
- # if metric not in historic_avarages_cache[client_name]:
323
- # historic_avarages_cache[client_name][metric] = {}
324
- # if field_name not in historic_avarages_cache[client_name][metric]:
325
- # historic_avarages_cache[client_name][metric][field_name] = {}
326
- # if year not in historic_avarages_cache[client_name][metric][field_name]:
327
- # historic_avarages_cache[client_name][metric][field_name][year] = {}
328
- # if len(historic_avarages_cache[client_name][metric][field_name][year]) > 0:
329
- # found_in_cache = True
330
-
331
-
332
- # #Check if the field and year are in the cache_clp for the current metric and client
333
- # found_in_cache_clp = False
334
- # if client_name not in historic_avarages_cache_clp:
335
- # historic_avarages_cache_clp[client_name] = {}
336
- # if 'CLP' not in historic_avarages_cache_clp[client_name]:
337
- # historic_avarages_cache_clp[client_name]['CLP'] = {}
338
- # if field_name not in historic_avarages_cache_clp[client_name]['CLP']:
339
- # historic_avarages_cache_clp[client_name]['CLP'][field_name] = {}
340
- # if year not in historic_avarages_cache_clp[client_name]['CLP'][field_name]:
341
- # historic_avarages_cache_clp[client_name]['CLP'][field_name][year] = {}
342
- # if len(historic_avarages_cache_clp[client_name]['CLP'][field_name][year]) > 0:
343
- # found_in_cache_clp = True
344
-
345
-
346
- # # If Found in cache, get the historic averages from the cache
347
- # if found_in_cache and found_in_cache_clp:
348
- # st.info('Found Historic Averages in Cache')
349
- # historic_avarages = historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages']
350
- # historic_avarages_dates = historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages_dates']
351
- # historic_avarages_clp = historic_avarages_cache_clp[client_name]['CLP'][field_name][year]['historic_avarages_clp']
352
-
353
- # # Else, calculate the historic averages and add them to the cache
354
- # else:
355
- # st.info('Calculating Historic Averages...')
356
-
357
-
358
- # #Empty lists for the historic averages , dates and cloud cover
359
- # historic_avarages = []
360
- # historic_avarages_dates = []
361
- # historic_avarages_clp = []
362
-
363
- # # Get the historic averages
364
- # dates_for_field_bar = st.progress(0)
365
- # with st.spinner('Calculating Historic Averages...'):
366
- # with st.empty():
367
- # for i in range(num_historic_dates):
368
- # # Get the historic average for the current date
369
- # current_date = historic_avarages_dates_for_field[i]
370
- # current_df = get_cuarted_df_for_field(src_df, field_name, current_date, metric, client_name)
371
- # current_df_clp = get_cuarted_df_for_field(src_df, field_name, current_date, 'CLP', client_name)
372
- # current_avg = current_df[f'{metric}_{current_date}'].mean()
373
- # current_avg_clp = current_df_clp[f'CLP_{current_date}'].mean()
374
- # # Add the historic average and date to the lists
375
- # historic_avarages.append(current_avg)
376
- # historic_avarages_dates.append(current_date)
377
- # historic_avarages_clp.append(current_avg_clp)
378
- # # Update the progress bar
379
- # dates_for_field_bar.progress((i + 1)/(num_historic_dates))
380
-
381
- # # Create a plot of the historic averages with the cloud cover as dashed line and dates as x axis (rotated 90 degrees when needed)
382
- # fig, ax = plt.subplots(figsize=(5, 3))
383
-
384
- # # Set the x axis ticks and labels
385
- # x = historic_avarages_dates
386
- # x_ticks = [i for i in range(len(x))]
387
- # ax.set_xticks(x_ticks)
388
-
389
- # #Set rotation to 90 degrees if the number of dates is greater than 10
390
- # rot = 0 if len(x) < 10 else 90
391
- # ax.set_xticklabels(x, rotation=rot)
392
-
393
- # # Set the y axis ticks and labels
394
- # y1 = historic_avarages
395
- # y2 = historic_avarages_clp
396
- # y_ticks = [i/10 for i in range(11)]
397
- # ax.set_yticks(y_ticks)
398
- # ax.set_yticklabels(y_ticks)
399
-
400
- # # Plot the historic averages and cloud cover
401
- # ax.plot(x_ticks, y1, label=f'{metric} Historic Averages')
402
- # ax.plot(x_ticks, y2, '--', label='Cloud Cover')
403
- # ax.legend()
404
-
405
- # # Set the title and axis labels
406
- # ax.set_title(f'{metric} Historic Averages for {field_name} (Field ID: {field_name}) in {year}')
407
- # ax.set_xlabel('Date')
408
- # ax.set_ylabel(f'{metric} Historic Averages')
409
-
410
- # # Display the plot
411
- # st.pyplot(fig, use_container_width=True)
412
-
413
- # # Add the historic averages to the cache
414
- # historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages'] = historic_avarages
415
- # historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages_dates'] = historic_avarages_dates
416
- # historic_avarages_cache_clp[client_name]['CLP'][field_name][year]['historic_avarages_clp'] = historic_avarages_clp
417
- # # Save the cache
418
- # joblib.dump(historic_avarages_cache, historic_avarages_cache_path)
419
- # joblib.dump(historic_avarages_cache_clp, historic_avarages_cache_clp_path)
420
- # # Tell the user that the historic averages are saved in the cache
421
- # st.info('Historic Averages Saved in Cache')
422
- # st.write(f'Cache Path: {historic_avarages_cache_path}')
423
- # st.write(f'Cache CLP Path: {historic_avarages_cache_clp_path}')
424
-
425
-
426
- # # Display the historic averages in nice plotly plot
427
- # fig = make_subplots(specs=[[{"secondary_y": True}]])
428
-
429
- # # Add the historic averages to the plot
430
- # fig.add_trace(
431
- # go.Scatter(x=historic_avarages_dates, y=historic_avarages, name=f'{metric} Historic Averages'),
432
- # secondary_y=False,
433
- # )
434
-
435
- # # Add the cloud cover to the plot
436
- # fig.add_trace(
437
- # go.Scatter(x=historic_avarages_dates, y=historic_avarages_clp, name='Cloud Cover'),
438
- # secondary_y=True,
439
- # )
440
-
441
- # # Set the title and axis labels
442
- # fig.update_layout(title_text=f'{metric} Historic Averages for {field_name} (Field ID: {field_name}) in {year}')
443
- # fig.update_xaxes(title_text='Date')
444
- # fig.update_yaxes(title_text=f'{metric} Historic Averages', secondary_y=False)
445
- # fig.update_yaxes(title_text='Cloud Cover', secondary_y=True)
446
-
447
- # # Display the plot
448
- # st.plotly_chart(fig)
449
-
450
-
451
- # st.markdown('---')
452
- # st.header('Show Historic GIF')
453
-
454
-
455
- # #Let the user select the year, start date and end date of the GIF
456
- # with st.expander('Select Year, Start Date and End Date of the GIF'):
457
- # # Get the year
458
- # years = [f'20{i}' for i in range(16, 23)]
459
- # year = st.selectbox('Select Year: ', years, index=len(years)-2, key=f'Select Year Dropdown Menu - {metric}- Historic Averages GIF')
460
-
461
- # # Set the start and end dates to the first and last dates of the year
462
- # start_date = f'{year}-01-01'
463
- # end_date = f'{year}-12-31'
464
-
465
- # # Get the dates for historic GIF
466
- # historic_avarages_dates_for_field = get_and_cache_available_dates(src_df, field_name, year, start_date, end_date)
467
-
468
- # # Convert the dates to datetime objects and sort them ascendingly then convert them back to strings
469
- # historic_avarages_dates_for_field = [datetime.strptime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
470
- # historic_avarages_dates_for_field.sort()
471
- # historic_avarages_dates_for_field = [datetime.strftime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
472
-
473
- # # Get the number of dates
474
- # num_historic_dates = len(historic_avarages_dates_for_field)
475
- # st.write(f' Found {num_historic_dates} dates for field {field_name} in {year} (from {start_date} to {end_date})')
476
-
477
- # # Display the historic GIF when the user clicks the button
478
- # display_historic_GIF_button = st.button(f'Display Historic GIF for Field {field_name} (Field ID: {field_name}) in {year} (from {start_date} to {end_date})',
479
- # key=f'Display Historic GIF Button - {metric}',
480
- # help='Click to display the historic GIF for the selected field',
481
- # use_container_width=True, type='primary')
482
-
483
- # # If the button is clicked, display the historic GIF
484
- # if display_historic_GIF_button:
485
-
486
- # #Initlize the historic GIF imgs and dates
487
- # st.info('Generating Historic GIF...')
488
- # historic_imgs = []
489
- # historic_imgs_dates = []
490
-
491
- # # Gen the historic GIF
492
- # dates_for_field_bar = st.progress(0)
493
- # with st.spinner('Generating Historic GIF...'):
494
- # with st.empty():
495
- # for i in range(num_historic_dates):
496
- # current_date = historic_avarages_dates_for_field[i]
497
- # current_df = get_cuarted_df_for_field(src_df, field_name, current_date, metric, client_name)
498
- # historic_imgs.append(current_df)
499
- # historic_imgs_dates.append(current_date)
500
- # dates_for_field_bar.progress((i + 1)/(num_historic_dates))
501
-
502
- # # Create a fig of the historic Img
503
- # fig, ax = plt.subplots(figsize=(10, 5))
504
-
505
- # # Get the current img
506
- # current_df_lat_lon = utils.add_lat_lon_to_gdf_from_geometry(current_df)
507
- # current_img = utils.gdf_column_to_one_band_array(current_df_lat_lon, f'{metric}_{current_date}')
508
-
509
- # # Plot the historic Img
510
- # title = f'{metric} for selected field {field_name} (Field ID: {field_name}) in {current_date}'
511
- # ax.imshow(current_img)
512
- # ax.set_title(title)
513
-
514
- # # Display the plot
515
- # st.pyplot(fig)
516
-
517
- # # Create the historic GIF
518
- # historic_GIF_name = f'{metric}_{field_name}_{year}.gif'
519
- # st.write('Creating Historic GIF...', historic_GIF_name)
520
-
521
 
522
  def monitor_fields():
523
  current_user = greeting("Let's take a look how these fields are doing")
@@ -526,13 +237,7 @@ def monitor_fields():
526
  else:
527
  st.info("No Fields Added Yet!")
528
  return
529
- # st.info("Hover over the field to show the properties or check the Existing Fields List below")
530
- # fields_map = gdf.explore()
531
- # sat_basemap = utils.basemaps['Google Satellite']
532
- # sat_basemap.add_to(fields_map)
533
- # folium.LayerControl().add_to(fields_map)
534
- # # output = st_folium(fields_map, key="edit_map", height=300, width=600)
535
- # folium_static(fields_map, height=300, width=600)
536
 
537
  with st.expander("Existing Fields List", expanded=False):
538
  st.write(gdf)
 
78
  return curated_df
79
 
80
 
81
  def get_cuarted_df_for_field(df, field, date, metric, clientName):
82
  curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
83
  if curated_date_path is not None:
 
103
  else:
104
  date = st.session_state['date']
105
 
106
  if True:
107
  start_date = '2024-01-01'
108
  today = datetime.today()
 
111
 
112
  dates = get_and_cache_available_dates(src_df, field_name, year, start_date, end_date)
113
  # Add None to the end of the list to be used as a default value
 
114
  #sort the dates from earliest to today
115
  dates = sorted(dates)
116
 
 
228
 
229
  else:
230
  st.info('Please Select A Field and A Date')
231
+
 
232
 
233
  def monitor_fields():
234
  current_user = greeting("Let's take a look how these fields are doing")
 
237
  else:
238
  st.info("No Fields Added Yet!")
239
  return
240
+
 
 
 
 
 
 
241
 
242
  with st.expander("Existing Fields List", expanded=False):
243
  st.write(gdf)
test.py DELETED
@@ -1,43 +0,0 @@
1
- import streamlit as st
2
-
3
- # Set page configuration
4
- st.set_page_config(
5
- page_title="Your App Title",
6
- page_icon=":shark:",
7
- layout="wide", # Use "wide" for expanded layout
8
- initial_sidebar_state="expanded",
9
- )
10
-
11
- # def local_css(file_name):
12
- # with open(file_name) "r") as f:
13
- # st.markdown(f'<style>{f.read()}</style>', unsafe_allow_html=True)
14
-
15
- # Write CSS to apply styles
16
- def custom_css():
17
- st.markdown("""
18
- <style>
19
- html, body, [data-testid="stAppViewContainer"] {
20
- background-color: #000000; /* Black background */
21
- color: #FFFFFF; /* White text color */
22
- }
23
- .stTextInput > label, .stSelectbox > label, .stRadio > label, .stCheckbox > label {
24
- color: #CCCCCC; /* Lighter text for better contrast */
25
- }
26
- /* Additional styling can be added here */
27
- </style>
28
- """, unsafe_allow_html=True)
29
-
30
- # Load CSS file (if you have a CSS file you prefer to use)
31
- # local_css("styles.css")
32
-
33
- # Apply custom CSS
34
- custom_css()
35
-
36
- # Your app code
37
- st.title("Your Streamlit App")
38
- st.write("This is a sample app with a black background.")
39
-
40
- # Example of other components
41
- st.text_input("Enter some text")
42
- st.selectbox("Choose an option", ["Option 1", "Option 2", "Option 3"])
43
- st.checkbox("Check me out")