A-O98 committed on
Commit 2cdce84 · Parent(s): 54c4900

first commit

Files changed (30)
  1. .gitattributes +2 -35
  2. .gitignore +160 -0
  3. LICENSE +21 -0
  4. README.md +79 -4
  5. app.py +52 -0
  6. authentication.py +157 -0
  7. data.geojson +22 -0
  8. fields.geojson +7 -0
  9. fields.parquet +0 -0
  10. fields_asim.parquet +0 -0
  11. fields_dfr4.parquet +0 -0
  12. main.py +52 -0
  13. pag/add_field.py +194 -0
  14. pag/edit.py +110 -0
  15. pag/moniter.py +561 -0
  16. pag/monitor.py +561 -0
  17. playground.ipynb +155 -0
  18. process.py +45 -0
  19. requirements.txt +11 -0
  20. scripts/cab.js +175 -0
  21. scripts/clp.js +16 -0
  22. scripts/fcover.js +171 -0
  23. scripts/lai.js +179 -0
  24. scripts/ndmi.js +24 -0
  25. scripts/ndvi.js +179 -0
  26. scripts/truecolor.js +16 -0
  27. senHub.py +98 -0
  28. test.py +43 -0
  29. users.db +0 -0
  30. utils.py +205 -0
.gitattributes CHANGED
@@ -1,35 +1,2 @@
- *.7z filter=lfs diff=lfs merge=lfs -text
- *.arrow filter=lfs diff=lfs merge=lfs -text
- *.bin filter=lfs diff=lfs merge=lfs -text
- *.bz2 filter=lfs diff=lfs merge=lfs -text
- *.ckpt filter=lfs diff=lfs merge=lfs -text
- *.ftz filter=lfs diff=lfs merge=lfs -text
- *.gz filter=lfs diff=lfs merge=lfs -text
- *.h5 filter=lfs diff=lfs merge=lfs -text
- *.joblib filter=lfs diff=lfs merge=lfs -text
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
- *.model filter=lfs diff=lfs merge=lfs -text
- *.msgpack filter=lfs diff=lfs merge=lfs -text
- *.npy filter=lfs diff=lfs merge=lfs -text
- *.npz filter=lfs diff=lfs merge=lfs -text
- *.onnx filter=lfs diff=lfs merge=lfs -text
- *.ot filter=lfs diff=lfs merge=lfs -text
- *.parquet filter=lfs diff=lfs merge=lfs -text
- *.pb filter=lfs diff=lfs merge=lfs -text
- *.pickle filter=lfs diff=lfs merge=lfs -text
- *.pkl filter=lfs diff=lfs merge=lfs -text
- *.pt filter=lfs diff=lfs merge=lfs -text
- *.pth filter=lfs diff=lfs merge=lfs -text
- *.rar filter=lfs diff=lfs merge=lfs -text
- *.safetensors filter=lfs diff=lfs merge=lfs -text
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
- *.tar.* filter=lfs diff=lfs merge=lfs -text
- *.tar filter=lfs diff=lfs merge=lfs -text
- *.tflite filter=lfs diff=lfs merge=lfs -text
- *.tgz filter=lfs diff=lfs merge=lfs -text
- *.wasm filter=lfs diff=lfs merge=lfs -text
- *.xz filter=lfs diff=lfs merge=lfs -text
- *.zip filter=lfs diff=lfs merge=lfs -text
- *.zst filter=lfs diff=lfs merge=lfs -text
- *tfevents* filter=lfs diff=lfs merge=lfs -text
+ # Auto detect text files and perform LF normalization
+ * text=auto
.gitignore ADDED
@@ -0,0 +1,160 @@
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py,cover
+ .hypothesis/
+ .pytest_cache/
+ cover/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ db.sqlite3-journal
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ .pybuilder/
+ target/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # pyenv
+ # For a library or package, you might want to ignore these files since the code is
+ # intended to run in multiple environments; otherwise, check them in:
+ # .python-version
+
+ # pipenv
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
+ # install all needed dependencies.
+ #Pipfile.lock
+
+ # poetry
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
+ # commonly ignored for libraries.
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+ #poetry.lock
+
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow
+ __pypackages__/
+
+ # Celery stuff
+ celerybeat-schedule
+ celerybeat.pid
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
+
+ # pytype static type analyzer
+ .pytype/
+
+ # Cython debug symbols
+ cython_debug/
+
+ # PyCharm
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
+ #.idea/
+
+
+ .streamlit/
+ secrets.toml
+ *.pdf
+ *.toml
+ dfr4/
+ shapefiles/
LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024 Ammar Nasr
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
README.md CHANGED
@@ -1,8 +1,8 @@
  ---
  title: Field Monitoring
- emoji:
- colorFrom: blue
- colorTo: gray
+ emoji: 🧑‍🌾 🌴
+ colorFrom: purple
+ colorTo: red
  sdk: streamlit
  sdk_version: 1.33.0
  app_file: app.py
@@ -10,4 +10,79 @@ pinned: false
  license: mit
  ---

- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ # agriAI
+
+ Welcome to agriAI, an advanced field monitoring application designed to help farmers and agricultural researchers monitor and manage their fields more effectively. This project utilizes cutting-edge technologies to provide real-time insights into crop health, soil conditions, and environmental factors.
+
+ ## Features
+
+ - **Real-Time Monitoring**: Track field conditions in real time to make informed decisions quickly.
+ - **Data Analysis**: Leverage data collected from various sensors to analyze soil health and crop conditions.
+ - **User Authentication**: Secure login and signup functionalities to ensure data privacy.
+ - **Field Management**: Tools to add, edit, and monitor different fields.
+ - **Responsive Design**: Accessible on various devices, ensuring functionality across platforms.
+
+ ## Getting Started
+
+ These instructions will get you a copy of the project up and running on your local machine for development and testing purposes.
+
+ ### Prerequisites
+
+ What you need to install the software:
+
+ - Python 3.8+
+ - pip
+ - Virtualenv (optional, but recommended)
+
+ ### Installation
+
+ Step-by-step guide to setting up a development environment:
+
+ 1. **Clone the repository**
+
+ ```bash
+ git clone https://github.com/ammarnasr/agriAI.git
+ cd agriAI
+ ```
+
+ 2. **Set up a Python virtual environment (optional)**
+
+ ```bash
+ python -m venv venv
+ source venv/bin/activate  # On Windows use `venv\Scripts\activate`
+ ```
+
+ 3. **Install the requirements**
+
+ ```bash
+ pip install -r requirements.txt
+ ```
+
+ 4. **Run the application**
+
+ ```bash
+ streamlit run main.py
+ ```
+
+ ## Usage
+
+ After installation, launch the application and navigate to `http://localhost:8501` in your web browser to see the app in action.
+
+ To log in or sign up, click the respective buttons on the main page. After authentication, use the sidebar to navigate between different functionalities like adding fields, editing them, or monitoring existing ones.
+
+ ## Contributing
+
+ We welcome contributions to agriAI! If you have suggestions for improvements or bug fixes, please feel free to fork the repository and submit a pull request.
+
+ 1. Fork the repository.
+ 2. Create your feature branch (`git checkout -b feature/fooBar`).
+ 3. Commit your changes (`git commit -am 'Add some fooBar'`).
+ 4. Push to the branch (`git push origin feature/fooBar`).
+ 5. Create a new Pull Request.
+
+ ## License
+
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
+
+ ## Acknowledgments
app.py ADDED
@@ -0,0 +1,52 @@
+ # app.py
+ import streamlit as st
+ import authentication
+ from pag import add_field, edit, monitor
+
+ # from pages import add_field, edit, moniter
+ def authenticate_user():
+     st.title("Welcome to :orange[Field Monitoring App]")
+     st.markdown("""
+     <style>
+     .stSelectbox > div > div {cursor: pointer;}
+     </style>
+     """, unsafe_allow_html=True)
+     if not st.session_state.authenticated:
+         choice = st.selectbox("Interested? Sign up or log in if you have an account", options=["Home", "Login", "SignUp"])
+
+         if choice == "Home":
+             st.write("App Description")
+         elif choice == "Login":
+             authentication.login()
+         elif choice == "SignUp":
+             authentication.signup()
+
+     return False
+
+ def main():
+     if "authenticated" not in st.session_state:
+         st.session_state.authenticated = False
+
+     if st.session_state.authenticated:
+         st.sidebar.title(":blue[Field Management Options]")
+         options = st.sidebar.radio("Choose an option:",
+                                    ("Add Field", "Edit Fields/Add Field Info", "Monitor"))
+
+         if options == "Add Field":
+             st.title("Welcome to :orange[Field Monitoring App]")
+             add_field.add_drawing()
+         elif options == "Edit Fields/Add Field Info":
+             st.title("Welcome to :orange[Field Monitoring App]")
+             edit.edit_fields()
+         elif options == "Monitor":
+             st.title("Welcome to :orange[Field Monitoring App]")
+             monitor.monitor_fields()
+     else:
+         authenticate_user()

+ if __name__ == "__main__":
+     main()
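
Review note: Streamlit reruns the whole script on every widget interaction, so `app.py` seeds `st.session_state.authenticated` before reading it and gates the sidebar on that flag. A minimal standalone sketch of this rerun-safe gate pattern (assuming Streamlit ≥ 1.33, as pinned in the Space config; the placeholder strings are illustrative):

```python
# Sketch only: the session-state gate pattern used in app.py.
import streamlit as st

# Seed the flag once; st.session_state survives the rerun each widget event triggers.
if "authenticated" not in st.session_state:
    st.session_state.authenticated = False

if st.session_state.authenticated:
    st.sidebar.radio("Choose an option:",
                     ("Add Field", "Edit Fields/Add Field Info", "Monitor"))
else:
    st.selectbox("Interested? Sign up or log in if you have an account",
                 options=["Home", "Login", "SignUp"])
```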
authentication.py ADDED
@@ -0,0 +1,157 @@
+ # authentication.py
+ import streamlit as st
+ import pandas as pd
+ import sqlite3
+ import time
+ import re
+ import hashlib
+ import hmac
+
+
+ def greeting(msg="Welcome"):
+     current_user = st.session_state['current_user']
+     st.write(f"{msg} {current_user}!")
+     return current_user
+
+
+ def check_password():
+     """Returns `True` if the user had a correct password."""
+
+     def login_form():
+         """Form with widgets to collect user information"""
+         st.info("Demo credentials: dfr4 / 12345")
+         with st.form("Credentials"):
+             st.text_input("Username", key="username")
+             st.text_input("Password", type="password", key="password")
+             st.form_submit_button("Log in", on_click=password_entered)
+
+     def password_entered():
+         """Checks whether a password entered by the user is correct."""
+         st.session_state['current_user'] = st.session_state["username"]
+         if st.session_state["username"] in st.secrets[
+             "passwords"  # here is where you should connect to the database
+         ] and hmac.compare_digest(
+             st.session_state["password"],
+             st.secrets.passwords[st.session_state["username"]],
+         ):
+             st.session_state["password_correct"] = True
+             del st.session_state["password"]  # Don't store the username or password.
+             del st.session_state["username"]
+         else:
+             st.session_state["password_correct"] = False
+
+     # Return True if the username + password is validated.
+     if st.session_state.get("password_correct", False):
+         return True
+
+     # Show inputs for username + password.
+     login_form()
+     if "password_correct" in st.session_state:
+         st.error("😕 User not known or password incorrect")
+     return False
+
+
+ # Security
+ def make_hashes(password):
+     return hashlib.sha256(str.encode(password)).hexdigest()
+
+ def check_hashes(password, hashed_text):
+     if make_hashes(password) == hashed_text:
+         return hashed_text
+     return False
+
+ # DB Management
+ conn = sqlite3.connect('users.db', check_same_thread=False)
+ c = conn.cursor()
+
+ # DB Functions
+ def create_usertable():
+     c.execute('''CREATE TABLE IF NOT EXISTS userstable (
+                     username TEXT UNIQUE,
+                     email TEXT UNIQUE,
+                     password TEXT)''')
+     conn.commit()
+
+ def add_userdata(username, email, password):
+     c.execute('INSERT INTO userstable(username, email, password) VALUES (?, ?, ?)', (username, email, password))
+     conn.commit()
+
+ def login_user(username, password):
+     c.execute('SELECT * FROM userstable WHERE username = ? AND password = ?', (username, password))
+     data = c.fetchall()
+     return data
+
+ def view_all_users():
+     c.execute('SELECT * FROM userstable')
+     data = c.fetchall()
+     return data
+
+ def username_exists(username):
+     c.execute('SELECT * FROM userstable WHERE username = ?', (username,))
+     return c.fetchone() is not None
+
+ def email_exists(email):
+     # c.execute('SELECT * FROM userstable WHERE email = ?', (email,))
+     # return c.fetchone() is not None
+     return False
+
+ # Validators
+ def is_valid_email(email):
+     regex = r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b'
+     return re.fullmatch(regex, email)
+
+ def is_strong_password(password):
+     regex = r'^(?=.*[A-Za-z])(?=.*\d)[A-Za-z\d]{8,}$'
+     return re.fullmatch(regex, password)
+
+ # Signup Function
+ def signup():
+     st.subheader("Create New Account")
+     new_user = st.text_input("Username")
+     Email_address = st.text_input("Email")
+     new_password = st.text_input("Password", type='password')
+     confirm_password = st.text_input("Confirm Password", type='password')
+
+     create = st.button("Signup")
+     if create:
+         if username_exists(new_user):
+             st.error("Username is already taken")
+         elif email_exists(Email_address):
+             st.error("Email is already registered")
+         elif not is_valid_email(Email_address):
+             st.error("Invalid email address")
+         elif not is_strong_password(new_password):
+             st.error("Password too weak. Must be at least 8 characters long and include numbers and letters.")
+         elif new_password != confirm_password:
+             st.error("Passwords do not match")
+         else:
+             add_userdata(new_user, Email_address, make_hashes(new_password))
+             st.success("You have successfully created a valid Account")
+             st.info("Go to Login Menu to login")
+             # Clearing the form
+             for field in ['new_user', 'Email_address', 'new_password', 'confirm_password']:
+                 if field in st.session_state:
+                     st.session_state[field] = ''
+
+ # Login Function
+ def login():
+     username = st.text_input("User Name", key="username")
+     password = st.text_input("Password", type='password', key='password')
+
+     if st.button("Login"):
+         hashed_pswd = make_hashes(password)
+
+         result = login_user(username, hashed_pswd)
+         if result:
+             st.success("Logged In as {}".format(username))
+             st.session_state.authenticated = True
+             st.session_state['current_user'] = username
+             # Clear sensitive states
+             del st.session_state["password"]
+             del st.session_state["username"]
+             st.rerun()
+         else:
+             st.warning("Incorrect Username/Password")
+
+ # Call create_usertable to ensure the table is created/updated when the script runs
+ create_usertable()
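
Review note: `make_hashes` stores unsalted SHA-256 digests, so identical passwords always hash to identical values and are vulnerable to precomputed-table attacks. A hedged sketch of a salted alternative using the standard library's PBKDF2; the function names and parameters (16-byte salt, 100,000 iterations) are illustrative choices, not part of this commit:

```python
# Sketch only: salted PBKDF2 password hashing with stdlib primitives.
import hashlib
import hmac
import os

def hash_password(password: str) -> str:
    salt = os.urandom(16)                  # fresh random salt per user
    digest = hashlib.pbkdf2_hmac("sha256", password.encode(), salt, 100_000)
    return salt.hex() + ":" + digest.hex() # store the salt alongside the digest

def verify_password(password: str, stored: str) -> bool:
    salt_hex, digest_hex = stored.split(":")
    candidate = hashlib.pbkdf2_hmac("sha256", password.encode(),
                                    bytes.fromhex(salt_hex), 100_000)
    return hmac.compare_digest(candidate.hex(), digest_hex)
```

Swapping this in would only touch the `make_hashes`/`check_hashes` call sites in `signup()` and `login()`.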
data.geojson ADDED
@@ -0,0 +1,22 @@
+ {
+   "type": "FeatureCollection",
+   "features": [
+     {
+       "type": "Feature",
+       "properties": {},
+       "geometry": {
+         "type": "Polygon",
+         "coordinates": [
+           [
+             [32.584179, 15.217575],
+             [32.726973, 13.546143],
+             [33.957199, 13.973163],
+             [34.286724, 14.739791],
+             [33.69358, 15.620197],
+             [32.584179, 15.217575]
+           ]
+         ]
+       }
+     }
+   ]
+ }
fields.geojson ADDED
@@ -0,0 +1,7 @@
+ {
+   "type": "FeatureCollection",
+   "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } },
+   "features": [
+     { "type": "Feature", "properties": { "name": "tri", "area": 0.0, "crop": "", "sowing_date": "2024-03-16", "notes": "" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 14.70916, 33.008871 ], [ 14.078534, 32.687414 ], [ 14.078534, 33.948826 ], [ 14.70916, 33.008871 ] ] ] } }
+   ]
+ }
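
For a quick local sanity check of the added GeoJSON, geopandas (imported throughout `pag/`, and presumably pinned in `requirements.txt`) reads it directly; the snippet below is illustrative:

```python
# Inspect fields.geojson: one polygon feature named "tri" with CRS84 coordinates.
import geopandas as gpd

gdf = gpd.read_file("fields.geojson")
print(gdf.crs)                              # WGS 84 / CRS84 as declared in the file
print(gdf[["name", "crop", "sowing_date"]]) # the feature properties
```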
fields.parquet ADDED
Binary file (6.42 kB)
fields_asim.parquet ADDED
Binary file (7.24 kB)
fields_dfr4.parquet ADDED
Binary file (8.54 kB)
main.py ADDED
@@ -0,0 +1,52 @@
+ # main.py
+ import streamlit as st
+ import authentication
+ from pag import add_field, edit, moniter
+
+ # from pages import add_field, edit, moniter
+ def authenticate_user():
+     st.title("Welcome to :orange[Field Monitoring App]")
+     st.markdown("""
+     <style>
+     .stSelectbox > div > div {cursor: pointer;}
+     </style>
+     """, unsafe_allow_html=True)
+     if not st.session_state.authenticated:
+         choice = st.selectbox("Interested? Sign up or log in if you have an account", options=["Home", "Login", "SignUp"])
+
+         if choice == "Home":
+             st.write("App Description")
+         elif choice == "Login":
+             authentication.login()
+         elif choice == "SignUp":
+             authentication.signup()
+
+     return False
+
+ def main():
+     if "authenticated" not in st.session_state:
+         st.session_state.authenticated = False
+
+     if st.session_state.authenticated:
+         st.sidebar.title("Navigation")
+         options = st.sidebar.radio("Choose an option:",
+                                    ("Add Field", "Edit", "Monitor"))
+
+         if options == "Add Field":
+             st.title("Welcome to :orange[Field Monitoring App]")
+             add_field.add_drawing()
+         elif options == "Edit":
+             st.title("Welcome to :orange[Field Monitoring App]")
+             edit.edit_fields()
+         elif options == "Monitor":
+             st.title("Welcome to :orange[Field Monitoring App]")
+             moniter.monitor_fields()
+     else:
+         authenticate_user()

+ if __name__ == "__main__":
+     main()
pag/add_field.py ADDED
@@ -0,0 +1,194 @@
+ import utils
+ import os
+ import folium
+ import pandas as pd
+ import streamlit as st
+ import geopandas as gpd
+ from folium.plugins import Draw
+ from shapely.geometry import Polygon
+ from streamlit_folium import st_folium
+ from authentication import greeting, check_password
+ import shapely.ops as ops
+ from functools import partial
+ import pyproj
+
+ def check_authentication():
+     if not check_password():
+         st.stop()
+
+
+ def display_existing_fields(current_user):
+     with st.expander("Existing Fields", expanded=False):
+         if os.path.exists(f"fields_{current_user}.parquet"):
+             gdf = gpd.read_parquet(f"fields_{current_user}.parquet")
+             st.write(gdf)
+             mm = gdf.explore()
+             st_folium(mm)
+         else:
+             st.info("No Fields Added Yet!")
+
+ def add_existing_fields_to_map(m, current_user):
+     if os.path.exists(f"fields_{current_user}.parquet"):
+         fg = folium.FeatureGroup(name="Existing Fields", control=True).add_to(m)
+         gdf = gpd.read_parquet(f"fields_{current_user}.parquet")
+         for i, row in gdf.iterrows():
+             edges = row['geometry'].exterior.coords.xy
+             edges = [[i[1], i[0]] for i in zip(*edges)]
+             folium.Polygon(edges, color='blue', fill=True, fill_color='blue', fill_opacity=0.6).add_to(fg)
+     return m
+
+ def get_center_of_existing_fields(current_user):
+     if os.path.exists(f"fields_{current_user}.parquet"):
+         gdf = gpd.read_parquet(f"fields_{current_user}.parquet")
+         edges = gdf['geometry'][0].exterior.coords.xy
+         edges = [[i[1], i[0]] for i in zip(*edges)]
+         edges_center = [sum([i[0] for i in edges]) / len(edges), sum([i[1] for i in edges]) / len(edges)]
+         return edges_center
+     return [15.572363674301132, 32.69167103104079]
+
+ def display_map_and_drawing_controls(m, center_start):
+     zoom_start = 13
+     if st.session_state['active_drawing'] is None:
+         st.info("IMPORTANT: Click on the drawing to confirm the drawn field", icon="🚨")
+         sat_basemap = utils.basemaps['Google Satellite']
+         sat_basemap.add_to(m)
+         folium.LayerControl().add_to(m)
+         output = st_folium(m, center=center_start, zoom=zoom_start, key="new", width=800)
+         active_drawing = output['last_active_drawing']
+         st.session_state['active_drawing'] = active_drawing
+         return False
+     else:
+         st.info("Drawing Captured! Click on the button below to Clear Drawing and Draw Again")
+         active_drawing = st.session_state['active_drawing']
+         new_map = folium.Map(location=center_start, zoom_start=8)
+         edges = [[i[1], i[0]] for i in active_drawing['geometry']['coordinates'][0]]
+         edges_center = [sum([i[0] for i in edges]) / len(edges), sum([i[1] for i in edges]) / len(edges)]
+         folium.Polygon(edges, color='green', fill=True, fill_color='green', fill_opacity=0.6, name="New Field").add_to(new_map)
+         sat_basemap = utils.basemaps['Google Satellite']
+         sat_basemap.add_to(new_map)
+         folium.LayerControl().add_to(new_map)
+         st_folium(new_map, center=edges_center, zoom=zoom_start, key="drawn", width=800)
+         return True
+
+ def handle_user_actions(active_drawing, current_user, intersects, within_area):
+     draw_again_col, add_field_info_col = st.columns([1, 1])
+     with draw_again_col:
+         draw_again = st.button("Draw Again", key="draw_again", help="Click to Clear Drawing and Draw Again",
+                                type="primary", use_container_width=True, disabled=st.session_state['active_drawing'] is None)
+         if draw_again:
+             st.session_state['active_drawing'] = None
+             st.rerun()
+     with add_field_info_col:
+         if st.session_state['active_drawing'] is None:
+             st.info("Drawing not captured yet!")
+         else:
+             field_name = st.text_input("Field Name*", help="Enter a distinct name for the field", key="field_name")
+             if field_name == "":
+                 st.warning("Field Name cannot be empty!")
+             if os.path.exists(f"fields_{current_user}.parquet"):
+                 gdf = gpd.read_parquet(f"fields_{current_user}.parquet")
+                 if field_name in gdf['name'].tolist():
+                     st.warning("Field Name already exists. Please enter a different name!")
+             submit = st.button("Submit", key="submit", help="Click to Submit Field Information", type="primary",
+                                use_container_width=True, disabled=(st.session_state['active_drawing'] is None or field_name == "") or intersects or not within_area)
+             if submit:
+                 save_field_information(active_drawing, field_name, current_user)
+                 st.success("Field Information Submitted Successfully!")
+                 st.session_state['active_drawing'] = None
+                 st.rerun()
+
+ def save_field_information(active_drawing, field_name, current_user):
+     edges = [[i[0], i[1]] for i in active_drawing['geometry']['coordinates'][0]]
+     geom = Polygon(edges)
+     field_dict = {
+         "name": field_name,
+         "geometry": geom
+     }
+     gdf = gpd.GeoDataFrame([field_dict], geometry='geometry')
+     gdf.crs = "EPSG:4326"
+     if os.path.exists(f"fields_{current_user}.parquet"):
+         old_gdf = gpd.read_parquet(f"fields_{current_user}.parquet")
+         gdf = gpd.GeoDataFrame(pd.concat([old_gdf, gdf], ignore_index=True), crs="EPSG:4326")
+     gdf.to_parquet(f"fields_{current_user}.parquet")
+
+ def initialize_active_drawing_state():
+     if 'active_drawing' not in st.session_state:
+         st.session_state['active_drawing'] = None
+     if 'current_user' not in st.session_state:
+         st.session_state['current_user'] = None
+
+
+ def check_intersection_with_existing_fields(active_drawing, current_user):
+     if active_drawing is None:
+         return False
+     if os.path.exists(f"fields_{current_user}.parquet"):
+         gdf = gpd.read_parquet(f"fields_{current_user}.parquet")
+         edges = [[i[0], i[1]] for i in active_drawing['geometry']['coordinates'][0]]
+         geom = Polygon(edges)
+         geom = gpd.GeoSeries([geom]*len(gdf), crs="EPSG:4326")
+         geom1 = geom.to_crs(gdf.crs)
+         geom2 = gdf.geometry.to_crs(gdf.crs)
+         if geom1.overlaps(geom2).any():
+             st.warning("Field intersects with existing fields. Please draw again!")
+             with st.expander("Intersecting Fields", expanded=False):
+                 m = geom1.explore(name="New Field", color="red")
+                 m = gdf.explore(m=m, name="Existing Fields", color="blue")
+                 st_folium(m)
+             return True
+     return False
+
+
+ def check_polygon_area_within_range(active_drawing, min_area_km2=1, max_area_km2=10):
+     if active_drawing is None:
+         return
+     edges = [[i[0], i[1]] for i in active_drawing['geometry']['coordinates'][0]]
+     geom = Polygon(edges)
+     geom_area = ops.transform(
+         partial(
+             pyproj.transform,
+             pyproj.Proj(init='EPSG:4326'),
+             pyproj.Proj(proj='aea', lat_1=geom.bounds[1], lat_2=geom.bounds[3])),
+         geom)
+     geom_area = geom_area.area / 10**6  # m2 -> km2
+     if geom_area < min_area_km2:
+         st.warning(f"Field area is less than {min_area_km2} km2. Please draw again!")
+         return False
+     if geom_area > max_area_km2:
+         st.warning(f"Field area is more than {max_area_km2} km2. Please draw again!")
+         return False
+     return True
+
+
+ def add_drawing():
+     initialize_active_drawing_state()
+     current_user = greeting("Drag and Zoom and draw your fields on the map, make sure to name them uniquely")
+     current_user = st.session_state['current_user']
+     display_existing_fields(current_user)
+
+     center_start = get_center_of_existing_fields(current_user)
+     zoom_start = 13
+     m = folium.Map(location=center_start, zoom_start=zoom_start)
+
+     draw_options = {'polyline': False, 'polygon': True, 'rectangle': True, 'circle': False, 'marker': False, 'circlemarker': False}
+     Draw(export=True, draw_options=draw_options).add_to(m)
+     m = add_existing_fields_to_map(m, current_user)
+
+     captured = display_map_and_drawing_controls(m, center_start)
+     if captured:
+         intersects = check_intersection_with_existing_fields(st.session_state['active_drawing'], current_user)
+         within_area = check_polygon_area_within_range(st.session_state['active_drawing'])
+         handle_user_actions(st.session_state['active_drawing'], current_user, intersects, within_area)


+ if __name__ == '__main__':
+     check_authentication()
+     add_drawing()
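
Review note: `check_polygon_area_within_range` computes area through `pyproj.transform` with `init='EPSG:4326'`, an API that is deprecated in pyproj 2+ and emits warnings. A sketch of the same km² check using `pyproj.Geod`, which measures geodesic area directly on the WGS84 ellipsoid (the helper name is illustrative, not from this commit):

```python
# Sketch only: geodesic polygon area in km2 without the deprecated pyproj.transform path.
import pyproj
from shapely.geometry import Polygon

def polygon_area_km2(geom: Polygon) -> float:
    geod = pyproj.Geod(ellps="WGS84")
    area_m2, _perimeter = geod.geometry_area_perimeter(geom)
    return abs(area_m2) / 1e6  # sign of area_m2 depends on ring orientation

# Example with lon/lat vertices like those produced by the folium Draw plugin:
ring = [(32.584179, 15.217575), (32.726973, 13.546143),
        (33.957199, 13.973163), (32.584179, 15.217575)]
print(polygon_area_km2(Polygon(ring)))
```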
pag/edit.py ADDED
@@ -0,0 +1,110 @@
+ import os
+ import utils
+ import streamlit as st
+ import geopandas as gpd
+ from streamlit_folium import st_folium, folium_static
+ from authentication import greeting, check_password
+ import folium
+ import time
+
+ def check_authentication():
+     if not check_password():
+         st.stop()
+
+
+ def add_properties(df, col_name, value, field_name):
+     if col_name not in df.columns:
+         df[col_name] = None
+     df.loc[df['name'] == field_name, col_name] = value
+     return df
+
+ def select_field(gdf):
+     names = gdf['name'].tolist()
+     names.append("Select Field")
+     field_name = st.selectbox("Select Field", options=names, key="field_name_edit", help="Select the field to edit", index=len(names)-1)
+     return field_name
+
+ def read_crop_type():
+     crop_type = st.text_input("Field Crop*", help="Enter the crop type", key="field_crop", value='')
+     return crop_type
+
+ def read_irrigation_type():
+     irrigation_type = st.selectbox("Field Type*", options=["Rainfed", "Irrigated", ""], key="field_type", help="Select the field type", index=2)
+     return irrigation_type
+
+ def read_custom_property():
+     custom_property_name = st.text_input("Custom Property Name*", help="Enter the custom property name", key="custom_property_name", value='')
+     custom_property_value = st.text_input("Custom Property Value*", help="Enter the custom property value", key="custom_property_value", value='', disabled=custom_property_name == "")
+     return custom_property_name, custom_property_value
+
+
+ def edit_fields():
+     current_user = greeting("Changed your mind? Edit, Add or Delete Fields easily")
+     file_path = f"fields_{current_user}.parquet"
+     if os.path.exists(file_path):
+         gdf = gpd.read_parquet(file_path)
+     else:
+         st.info("No Fields Added Yet!")
+         return
+     st.info("Hover over the field to show the properties or check the Existing Fields List below")
+     fields_map = gdf.explore()
+     sat_basemap = utils.basemaps['Google Satellite']
+     sat_basemap.add_to(fields_map)
+     folium.LayerControl().add_to(fields_map)
+     folium_static(fields_map, height=300, width=600)
+
+     with st.expander("Existing Fields List", expanded=False):
+         st.write(gdf)
+
+     field_name = select_field(gdf)
+     if field_name == "Select Field":
+         st.info("No Field Selected Yet!")
+     else:
+         delete_edit = st.radio("Delete or Edit Field?", options=["View", "Edit", "Delete"], key="delete_edit", help="Select the operation to perform")
+         if delete_edit == "View":
+             field = gdf[gdf['name'] == field_name]
+             st.write(field)
+         elif delete_edit == "Delete":
+             delete = st.button("Delete Field", key="delete", help="Click to Delete Field", type="primary", use_container_width=True)
+             if delete:
+                 if len(gdf) == 1 and (gdf['name'] == field_name).all():  # Check if this is the only field left
+                     os.remove(file_path)  # Delete the .parquet file if it's the last field
+                     st.success("All fields deleted. The data file has been removed.")
+                     time.sleep(0.3)
+                     st.rerun()
+                 else:
+                     gdf = gdf[gdf['name'] != field_name]
+                     gdf.to_parquet(file_path)
+                     st.success("Field Deleted Successfully!")
+                     time.sleep(0.3)
+                     st.rerun()
+         else:
+             no_input = True
+             crop_type = read_crop_type()
+             irrigation_type = read_irrigation_type()
+             custom_property_name, custom_property_value = read_custom_property()
+             if crop_type != "" or irrigation_type != "" or custom_property_value != "":
+                 no_input = False
+
+             submit = st.button("Submit", key="submitProperties", help="Click to Submit Field Information", type="primary",
+                                use_container_width=True, disabled=no_input)
+             if submit:
+                 if crop_type != "":
+                     gdf = add_properties(gdf, "crop", crop_type, field_name)
+                 if irrigation_type != "":
+                     gdf = add_properties(gdf, "irrigation", irrigation_type, field_name)
+                 if custom_property_name != "" and custom_property_value != "":
+                     gdf = add_properties(gdf, custom_property_name, custom_property_value, field_name)
+                 gdf.to_parquet(f"fields_{current_user}.parquet")
+                 # st.rerun()
+                 st.success("Field Information Updated Successfully!")
+                 st.info("Please Select View above to see the updated field information")


+ if __name__ == '__main__':
+     check_authentication()
+     edit_fields()
pag/moniter.py ADDED
@@ -0,0 +1,561 @@
+ import os
+ import utils
+ import streamlit as st
+ import geopandas as gpd
+ from streamlit_folium import st_folium, folium_static
+ from authentication import greeting, check_password
+ import folium
+ from senHub import SenHub
+ from datetime import datetime
+ from sentinelhub import SHConfig, MimeType
+ import requests
+ import process
+ import joblib
+ from zipfile import ZipFile
+ import matplotlib.pyplot as plt
+ from plotly.subplots import make_subplots
+ import plotly.graph_objects as go
+
+ def check_authentication():
+     if not check_password():
+         st.stop()
+
+
+ config = SHConfig()
+ config.instance_id = '6c220beb-90c4-4131-b658-10cddd8d97b9'
+ config.sh_client_id = '17e7c154-7f2d-4139-b1af-cef762385079'
+ config.sh_client_secret = 'KvbQMKZB85ZWEgWuxqiWIVEvTAQEfoF9'
+
+
+ def select_field(gdf):
+     names = gdf['name'].tolist()
+     names.append("Select Field")
+     field_name = st.selectbox("Select Field", options=names, key="field_name_monitor", help="Select the field to edit", index=len(names)-1)
+     return field_name
+
+
+ def calculate_bbox(df, field):
+     bbox = df.loc[df['name'] == field].bounds
+     r = bbox.iloc[0]
+     return [r.minx, r.miny, r.maxx, r.maxy]
+
+ def get_available_dates_for_field(df, field, year, start_date='', end_date=''):
+     bbox = calculate_bbox(df, field)
+     token = SenHub(config).token
+     headers = utils.get_bearer_token_headers(token)
+     if start_date == '' or end_date == '':
+         start_date = f'{year}-01-01'
+         end_date = f'{year}-12-31'
+     data = f'{{ "collections": [ "sentinel-2-l2a" ], "datetime": "{start_date}T00:00:00Z/{end_date}T23:59:59Z", "bbox": {bbox}, "limit": 100, "distinct": "date" }}'
+     response = requests.post('https://services.sentinel-hub.com/api/v1/catalog/search', headers=headers, data=data)
+     try:
+         features = response.json()['features']
+     except:
+         print(response.json())
+         features = []
+     return features
+
+ @st.cache_data
+ def get_and_cache_available_dates(_df, field, year, start_date, end_date):
+     dates = get_available_dates_for_field(_df, field, year, start_date, end_date)
+     print(f'Caching Dates for {field}')
+     return dates
+
+
+ def get_cuarted_df_for_field(df, field, date, metric, clientName):
+     curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
+     if curated_date_path is not None:
+         curated_df = gpd.read_file(curated_date_path)
+     else:
+         process.Download_image_in_given_date(clientName, metric, df, field, date)
+         process.mask_downladed_image(clientName, metric, df, field, date)
+         process.convert_maske_image_to_geodataframe(clientName, metric, df, field, date, df.crs)
+         curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
+         curated_df = gpd.read_file(curated_date_path)
+     return curated_df
+
+ def track(metric, field_name, src_df, client_name):
+     dates = []
+     date = -1
+     if 'dates' not in st.session_state:
+         st.session_state['dates'] = dates
+     else:
+         dates = st.session_state['dates']
+     if 'date' not in st.session_state:
+         st.session_state['date'] = date
+     else:
+         date = st.session_state['date']
+
+     # Give the user the option to select year, start date and end date
+     # with st.expander('Select Year, Start Date and End Date'):
+     #     # Get the year
+     #     years = [f'20{i}' for i in range(22, 25)]
+     #     year = st.selectbox('Select Year: ', years, index=len(years)-2, key=f'Select Year Dropdown Menu - {metric}')
+     #
+     #     # Set the min, max and default values for start and end dates
+     #     min_val = f'{year}-01-01'
+     #     max_val = f'{year}-12-31'
+     #     default_val = f'{year}-11-01'
+     #     min_val = datetime.strptime(min_val, '%Y-%m-%d')
+     #     max_val = datetime.strptime(max_val, '%Y-%m-%d')
+     #     default_val = datetime.strptime(default_val, '%Y-%m-%d')
+     #
+     #     # Get the start and end dates
+     #     start_date = st.date_input('Start Date', value=default_val, min_value=min_val, max_value=max_val, key=f'Start Date - {metric}')
+     #     end_date = st.date_input('End Date', value=max_val, min_value=min_val, max_value=max_val, key=f'End Date - {metric}')
+
+     # Get the dates with available data for that field when the user clicks the button
+     # get_dates_button = st.button(f'Get Dates for Field {field_name} (Field ID: {field_name}) in {year} (from {start_date} to {end_date})',
+     #                              key=f'Get Dates Button - {metric}',
+     #                              help='Click to get the dates with available data for the selected field',
+     #                              use_container_width=True, type='primary')
+     # if get_dates_button:
+     if True:
+         start_date = '2024-01-01'
+         today = datetime.today()
+         end_date = today.strftime('%Y-%m-%d')
+         year = '2024'
+
+         dates = get_and_cache_available_dates(src_df, field_name, year, start_date, end_date)
+         # Add None to the end of the list to be used as a default value
+         # dates.append(-1)
+         # Sort the dates from earliest to today
+         dates = sorted(dates)
+
+         # Add the dates to the session state
+         st.session_state['dates'] = dates
+
+     # Display the dropdown menu
+     if len(dates) > 0:
+         date = st.selectbox('Select Observation Date: ', dates, index=len(dates)-1, key=f'Select Date Dropdown Menu - {metric}')
+         if date != -1:
+             st.write('You selected:', date)
+             # Add the date to the session state
+             st.session_state['date'] = date
+         else:
+             st.write('Please Select A Date')
+     else:
+         st.info('No dates available for the selected field and dates range, select a different range or click the button to fetch the dates again')
+
+     st.markdown('---')
+     st.header('Show Field Data')
+
+     # If a field and a date are selected, display the field data
+     if date != -1:
+
+         # Get the field data at the selected date
+         with st.spinner('Loading Field Data...'):
+             # Get the metric data and cloud cover data for the selected field and date
+             metric_data = get_cuarted_df_for_field(src_df, field_name, date, metric, client_name)
+             cloud_cover_data = get_cuarted_df_for_field(src_df, field_name, date, 'CLP', client_name)
+
+             # Merge the metric and cloud cover data on the geometry column
+             field_data = metric_data.merge(cloud_cover_data, on='geometry')
+
+         # Display the field data
+         st.write(f'Field Data for {field_name} (Field ID: {field_name}) on {date}')
+         st.write(field_data.head(2))
+
+         # Get Average Cloud Cover
+         avg_clp = field_data[f'CLP_{date}'].mean() * 100
+
+         # If the average cloud cover is greater than 80%, display a warning message
+         if avg_clp > 80:
+             st.warning(f'⚠️ The Average Cloud Cover is {avg_clp}%')
+             st.info('Please Select A Different Date')
+
+         ## Generate the field data Map ##
+
+         # Title, colormap and legend
+         title = f'{metric} for selected field {field_name} (Field ID: {field_name}) in {date}'
+         cmap = 'RdYlGn'
+
+         # Create a map of the field data
+         field_data_map = field_data.explore(
+             column=f'{metric}_{date}',
+             cmap=cmap,
+             legend=True,
+             vmin=0,
+             vmax=1,
+             marker_type='circle', marker_kwds={'radius': 5.3, 'fill': True})
+
+         # Add Google Satellite as a base map
+         google_map = utils.basemaps['Google Satellite']
+         google_map.add_to(field_data_map)
+
+         # Display the map
+         st_folium(field_data_map, width=725, key=f'Field Data Map - {metric}')
+
+         # Download links
+
+         # If the field data is not empty, display the download links
+         if len(field_data) > 0:
+             # Create two columns for the download links
+             download_as_shp_col, download_as_tiff_col = st.columns(2)
+
+             # Create a shapefile of the field data and add a download link
+             with download_as_shp_col:
+
+                 # Set the shapefile name and path based on the field id, metric and date
+                 extension = 'shp'
+                 shapefilename = f"{field_name}_{metric}_{date}.{extension}"
+                 path = f'./shapefiles/{field_name}/{metric}/{extension}'
+
+                 # Create the target directory if it doesn't exist
+                 os.makedirs(path, exist_ok=True)
+
+                 # Save the field data as a shapefile
+                 field_data.to_file(f'{path}/{shapefilename}')
+
+                 # Create a zip file of the shapefile
+                 files = []
+                 for i in os.listdir(path):
+                     if os.path.isfile(os.path.join(path, i)):
+                         if i[0:len(shapefilename)] == shapefilename:
+                             files.append(os.path.join(path, i))
+                 zipFileName = f'{path}/{field_name}_{metric}_{date}.zip'
+                 zipObj = ZipFile(zipFileName, 'w')
+                 for file in files:
+                     zipObj.write(file)
+                 zipObj.close()
+
+                 # Add a download link for the zip file
+                 with open(zipFileName, 'rb') as f:
+                     st.download_button('Download as ShapeFile', f, file_name=zipFileName)
+
+             # Get the tiff file path and create a download link
+             with download_as_tiff_col:
+                 # Get the tiff file path
+                 tiff_path = utils.get_masked_location_img_path(client_name, metric, date, field_name)
+                 # Add a download link for the tiff file
+                 download_filename = f'{metric}_{field_name}_{date}.tiff'
+                 with open(tiff_path, 'rb') as f:
+                     st.download_button('Download as Tiff File', f, file_name=download_filename)
+
+     else:
+         st.info('Please Select A Field and A Date')
+
+
+     # st.markdown('---')
+     # st.header('Show Historic Averages')
+
+     # # Let the user select the year, start date and end date
+     # with st.expander('Select Year, Start Date and End Date'):
+     #     # Get the year
+     #     years = [f'20{i}' for i in range(22, 25)]
+     #     year = st.selectbox('Select Year: ', years, index=len(years)-2, key=f'Select Year Dropdown Menu - {metric}- Historic Averages')
+     #
+     #     # Set the start and end dates to the first and last dates of the year
+     #     start_date = f'{year}-01-01'
+     #     end_date = f'{year}-12-31'
+
+     # # Get the dates for historic averages
+     # historic_avarages_dates_for_field = get_and_cache_available_dates(src_df, field_name, year, start_date, end_date)
+
+     # # Convert the dates to datetime objects and sort them ascendingly then convert them back to strings
+     # historic_avarages_dates_for_field = [datetime.strptime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
+     # historic_avarages_dates_for_field.sort()
+     # historic_avarages_dates_for_field = [datetime.strftime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
+
+     # # Get the number of dates
+     # num_historic_dates = len(historic_avarages_dates_for_field)
+     # st.write(f' Found {num_historic_dates} dates for field {field_name} in {year} (from {start_date} to {end_date})')
+
+     # # Display the historic averages when the user clicks the button
+     # display_historic_avgs_button = st.button(f'Display Historic Averages for Field {field_name} (Field ID: {field_name}) in {year} (from {start_date} to {end_date})',
+     #                                          key=f'Display Historic Averages Button - {metric}',
+     #                                          help='Click to display the historic averages for the selected field',
+     #                                          use_container_width=True, type='primary')
+
+     # # If the button is clicked, display the historic averages
+     # if display_historic_avgs_button:
+     #
+     #     # Initialize the historic averages cache dir and file path
+     #     historic_avarages_cache_dir = './historic_avarages_cache'
+     #     historic_avarages_cache_path = f'{historic_avarages_cache_dir}/historic_avarages_cache.joblib'
+     #     historic_avarages_cache_clp_path = f'{historic_avarages_cache_dir}/historic_avarages_cache_clp.joblib'
+     #
+     #     # Load the historic averages cache if it exists, else create it
+     #     if os.path.exists(historic_avarages_cache_path):
+     #         historic_avarages_cache = joblib.load(historic_avarages_cache_path)
+     #     else:
+     #         os.makedirs(historic_avarages_cache_dir, exist_ok=True)
+     #         joblib.dump({}, historic_avarages_cache_path)
+     #         historic_avarages_cache = joblib.load(historic_avarages_cache_path)
+     #     if os.path.exists(historic_avarages_cache_clp_path):
+     #         historic_avarages_cache_clp = joblib.load(historic_avarages_cache_clp_path)
+     #     else:
+     #         os.makedirs(historic_avarages_cache_dir, exist_ok=True)
+     #         joblib.dump({}, historic_avarages_cache_clp_path)
+     #         historic_avarages_cache_clp = joblib.load(historic_avarages_cache_clp_path)
+     #
+     #     found_in_cache = False
+     #     if client_name not in historic_avarages_cache:
+     #         historic_avarages_cache[client_name] = {}
+     #     if metric not in historic_avarages_cache[client_name]:
+     #         historic_avarages_cache[client_name][metric] = {}
+     #     if field_name not in historic_avarages_cache[client_name][metric]:
+     #         historic_avarages_cache[client_name][metric][field_name] = {}
+     #     if year not in historic_avarages_cache[client_name][metric][field_name]:
+     #         historic_avarages_cache[client_name][metric][field_name][year] = {}
+     #     if len(historic_avarages_cache[client_name][metric][field_name][year]) > 0:
+     #         found_in_cache = True
+     #
+     #     # Check if the field and year are in the cache_clp for the current metric and client
+     #     found_in_cache_clp = False
+     #     if client_name not in historic_avarages_cache_clp:
+     #         historic_avarages_cache_clp[client_name] = {}
+     #     if 'CLP' not in historic_avarages_cache_clp[client_name]:
+     #         historic_avarages_cache_clp[client_name]['CLP'] = {}
+     #     if field_name not in historic_avarages_cache_clp[client_name]['CLP']:
+     #         historic_avarages_cache_clp[client_name]['CLP'][field_name] = {}
+     #     if year not in historic_avarages_cache_clp[client_name]['CLP'][field_name]:
+     #         historic_avarages_cache_clp[client_name]['CLP'][field_name][year] = {}
+     #     if len(historic_avarages_cache_clp[client_name]['CLP'][field_name][year]) > 0:
+     #         found_in_cache_clp = True
+     #
+     #     # If found in cache, get the historic averages from the cache
+     #     if found_in_cache and found_in_cache_clp:
+     #         st.info('Found Historic Averages in Cache')
+     #         historic_avarages = historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages']
+     #         historic_avarages_dates = historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages_dates']
+     #         historic_avarages_clp = historic_avarages_cache_clp[client_name]['CLP'][field_name][year]['historic_avarages_clp']
+     #
+     #     # Else, calculate the historic averages and add them to the cache
+     #     else:
+     #         st.info('Calculating Historic Averages...')
+     #
+     #         # Empty lists for the historic averages, dates and cloud cover
+     #         historic_avarages = []
+     #         historic_avarages_dates = []
+     #         historic_avarages_clp = []
+     #
+     #         # Get the historic averages
+     #         dates_for_field_bar = st.progress(0)
+     #         with st.spinner('Calculating Historic Averages...'):
+     #             with st.empty():
+     #                 for i in range(num_historic_dates):
+     #                     # Get the historic average for the current date
+     #                     current_date = historic_avarages_dates_for_field[i]
+     #                     current_df = get_cuarted_df_for_field(src_df, field_name, current_date, metric, client_name)
+     #                     current_df_clp = get_cuarted_df_for_field(src_df, field_name, current_date, 'CLP', client_name)
+     #                     current_avg = current_df[f'{metric}_{current_date}'].mean()
+     #                     current_avg_clp = current_df_clp[f'CLP_{current_date}'].mean()
+     #                     # Add the historic average and date to the lists
+     #                     historic_avarages.append(current_avg)
+     #                     historic_avarages_dates.append(current_date)
+     #                     historic_avarages_clp.append(current_avg_clp)
+     #                     # Update the progress bar
+     #                     dates_for_field_bar.progress((i + 1)/(num_historic_dates))
+     #
+     #                     # Create a plot of the historic averages with the cloud cover as dashed line and dates as x axis (rotated 90 degrees when needed)
+     #                     fig, ax = plt.subplots(figsize=(5, 3))
+     #
+     #                     # Set the x axis ticks and labels
+     #                     x = historic_avarages_dates
+     #                     x_ticks = [i for i in range(len(x))]
+     #                     ax.set_xticks(x_ticks)
+     #
+     #                     # Set rotation to 90 degrees if the number of dates is greater than 10
+     #                     rot = 0 if len(x) < 10 else 90
+     #                     ax.set_xticklabels(x, rotation=rot)
+     #
+     #                     # Set the y axis ticks and labels
+     #                     y1 = historic_avarages
+     #                     y2 = historic_avarages_clp
+     #                     y_ticks = [i/10 for i in range(11)]
+     #                     ax.set_yticks(y_ticks)
+     #                     ax.set_yticklabels(y_ticks)
+     #
+     #                     # Plot the historic averages and cloud cover
+     #                     ax.plot(x_ticks, y1, label=f'{metric} Historic Averages')
+     #                     ax.plot(x_ticks, y2, '--', label='Cloud Cover')
+     #                     ax.legend()
+     #
+     #                     # Set the title and axis labels
+     #                     ax.set_title(f'{metric} Historic Averages for {field_name} (Field ID: {field_name}) in {year}')
+     #                     ax.set_xlabel('Date')
+     #                     ax.set_ylabel(f'{metric} Historic Averages')
+     #
+     #                     # Display the plot
+     #                     st.pyplot(fig, use_container_width=True)
+     #
+     #         # Add the historic averages to the cache
+     #         historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages'] = historic_avarages
+     #         historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages_dates'] = historic_avarages_dates
+     #         historic_avarages_cache_clp[client_name]['CLP'][field_name][year]['historic_avarages_clp'] = historic_avarages_clp
+     #         # Save the cache
+     #         joblib.dump(historic_avarages_cache, historic_avarages_cache_path)
+     #         joblib.dump(historic_avarages_cache_clp, historic_avarages_cache_clp_path)
+     #         # Tell the user that the historic averages are saved in the cache
+     #         st.info('Historic Averages Saved in Cache')
+     #         st.write(f'Cache Path: {historic_avarages_cache_path}')
+     #         st.write(f'Cache CLP Path: {historic_avarages_cache_clp_path}')
+     #
+     #     # Display the historic averages in nice plotly plot
+     #     fig = make_subplots(specs=[[{"secondary_y": True}]])
+     #
+     #     # Add the historic averages to the plot
+     #     fig.add_trace(
+     #         go.Scatter(x=historic_avarages_dates, y=historic_avarages, name=f'{metric} Historic Averages'),
+     #         secondary_y=False,
+     #     )
+     #
+     #     # Add the cloud cover to the plot
+     #     fig.add_trace(
+     #         go.Scatter(x=historic_avarages_dates, y=historic_avarages_clp, name='Cloud Cover'),
+     #         secondary_y=True,
+     #     )
+     #
+     #     # Set the title and axis labels
+     #     fig.update_layout(title_text=f'{metric} Historic Averages for {field_name} (Field ID: {field_name}) in {year}')
+     #     fig.update_xaxes(title_text='Date')
+     #     fig.update_yaxes(title_text=f'{metric} Historic Averages', secondary_y=False)
+     #     fig.update_yaxes(title_text='Cloud Cover', secondary_y=True)
+     #
+     #     # Display the plot
+     #     st.plotly_chart(fig)
+
+     # st.markdown('---')
+     # st.header('Show Historic GIF')
+
+     # # Let the user select the year, start date and end date of the GIF
+     # with st.expander('Select Year, Start Date and End Date of the GIF'):
+     #     # Get the year
+     #     years = [f'20{i}' for i in range(16, 23)]
+     #     year = st.selectbox('Select Year: ', years, index=len(years)-2, key=f'Select Year Dropdown Menu - {metric}- Historic Averages GIF')
+     #
+     #     # Set the start and end dates to the first and last dates of the year
+     #     start_date = f'{year}-01-01'
+     #     end_date = f'{year}-12-31'
+
+     # # Get the dates for historic GIF
+     # historic_avarages_dates_for_field = get_and_cache_available_dates(src_df, field_name, year, start_date, end_date)
+
+     # # Convert the dates to datetime objects and sort them ascendingly then convert them back to strings
+     # historic_avarages_dates_for_field = [datetime.strptime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
+     # historic_avarages_dates_for_field.sort()
+     # historic_avarages_dates_for_field = [datetime.strftime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
+
+     # # Get the number of dates
+     # num_historic_dates = len(historic_avarages_dates_for_field)
+     # st.write(f' Found {num_historic_dates} dates for field {field_name} in {year} (from {start_date} to {end_date})')
+
+     # # Display the historic GIF when the user clicks the button
+     # display_historic_GIF_button = st.button(f'Display Historic GIF for Field {field_name} (Field ID: {field_name}) in {year} (from {start_date} to {end_date})',
+     #                                         key=f'Display Historic GIF Button - {metric}',
+     #                                         help='Click to display the historic GIF for the selected field',
+     #                                         use_container_width=True, type='primary')
+
+     # # If the button is clicked, display the historic GIF
+     # if display_historic_GIF_button:
+     #
+     #     # Initialize the historic GIF imgs and dates
+     #     st.info('Generating Historic GIF...')
+     #     historic_imgs = []
+     #     historic_imgs_dates = []
+     #
+     #     # Gen the historic GIF
+     #     dates_for_field_bar = st.progress(0)
+     #     with st.spinner('Generating Historic GIF...'):
+     #         with st.empty():
+     #             for i in range(num_historic_dates):
+     #                 current_date = historic_avarages_dates_for_field[i]
+     #                 current_df = get_cuarted_df_for_field(src_df, field_name, current_date, metric, client_name)
+     #                 historic_imgs.append(current_df)
+     #                 historic_imgs_dates.append(current_date)
+     #                 dates_for_field_bar.progress((i + 1)/(num_historic_dates))
+     #
+     #                 # Create a fig of the historic Img
+     #                 fig, ax = plt.subplots(figsize=(10, 5))
+     #
+     #                 # Get the current img
+     #                 current_df_lat_lon = utils.add_lat_lon_to_gdf_from_geometry(current_df)
+     #                 current_img = utils.gdf_column_to_one_band_array(current_df_lat_lon, f'{metric}_{current_date}')
+     #
+     #                 # Plot the historic Img
+     #                 title = f'{metric} for selected field {field_name} (Field ID: {field_name}) in {current_date}'
+     #                 ax.imshow(current_img)
+     #                 ax.set_title(title)
+     #
+     #                 # Display the plot
+     #                 st.pyplot(fig)
+     #
+     #     # Create the historic GIF
+     #     historic_GIF_name = f'{metric}_{field_name}_{year}.gif'
+     #     st.write('Creating Historic GIF...', historic_GIF_name)
+
+
+ def monitor_fields():
+     current_user = greeting("Let's take a look how these fields are doing")
+     if os.path.exists(f"fields_{current_user}.parquet"):
+         gdf = gpd.read_parquet(f"fields_{current_user}.parquet")
+     else:
+         st.info("No Fields Added Yet!")
+         return
+     # st.info("Hover over the field to show the properties or check the Existing Fields List below")
+     # fields_map = gdf.explore()
+     # sat_basemap = utils.basemaps['Google Satellite']
+     # sat_basemap.add_to(fields_map)
+     # folium.LayerControl().add_to(fields_map)
+     # # output = st_folium(fields_map, key="edit_map", height=300, width=600)
+     # folium_static(fields_map, height=300, width=600)
536
+
537
+ with st.expander("Existing Fields List", expanded=False):
538
+ st.write(gdf)
539
+
540
+ field_name = select_field(gdf)
541
+ if field_name == "Select Field":
542
+ st.info("No Field Selected Yet!")
543
+
544
+ else:
545
+ with st.expander("Metrics Explanation", expanded=False):
546
+ st.write("NDVI: Normalized Difference Vegetation Index, Mainly used to monitor the health of vegetation")
547
+ st.write("LAI: Leaf Area Index, Mainly used to monitor the productivity of vegetation")
548
+ st.write("CAB: Chlorophyll Absorption in the Blue band, Mainly used to monitor the chlorophyll content in vegetation")
549
+ st.write("NDMI: Normalized Difference Moisture Index, Mainly used to monitor the moisture content in vegetation")
550
+ st.success("More metrics and analysis features will be added soon")
551
+ metric = st.radio("Select Metric to Monitor", ["NDVI", "LAI", "CAB", "NDMI"], key="metric", index=0, help="Select the metric to monitor")
552
+ st.write(f"Monitoring {metric} for {field_name}")
553
+
554
+ track(metric, field_name, gdf, current_user)
555
+
556
+
557
+
558
+
559
+ if __name__ == '__main__':
560
+ check_authentication()
561
+ monitor_fields()
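The commented-out block above collects one matplotlib figure per date but stops right after `st.write('Creating Historic GIF...')`, before any GIF is actually written. A minimal sketch of that missing last step, assuming the per-date arrays from `utils.gdf_column_to_one_band_array` were collected in `historic_imgs` and that `imageio` is available (it is not in requirements.txt):

import io
import imageio.v2 as imageio
import matplotlib.pyplot as plt

def frames_to_gif(historic_imgs, historic_imgs_dates, metric, field_name, year):
    # Render each per-date array to an in-memory PNG, then bundle the frames.
    frames = []
    for img, date in zip(historic_imgs, historic_imgs_dates):
        fig, ax = plt.subplots(figsize=(10, 5))
        ax.imshow(img)
        ax.set_title(f'{metric} on {date}')
        buf = io.BytesIO()
        fig.savefig(buf, format='png')
        plt.close(fig)
        buf.seek(0)
        frames.append(imageio.imread(buf))
    gif_name = f'{metric}_{field_name}_{year}.gif'
    imageio.mimsave(gif_name, frames, duration=0.5)  # 0.5 s per frame
    return gif_name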
pag/monitor.py ADDED
@@ -0,0 +1,561 @@
+ import os
+ import utils
+ import streamlit as st
+ import geopandas as gpd
+ from streamlit_folium import st_folium, folium_static
+ from authentication import greeting, check_password
+ import folium
+ from senHub import SenHub
+ from datetime import datetime
+ from sentinelhub import SHConfig, MimeType
+ import requests
+ import process
+ import joblib
+ from zipfile import ZipFile
+ import matplotlib.pyplot as plt
+ from plotly.subplots import make_subplots
+ import plotly.graph_objects as go
+
+ def check_authentication():
+     if not check_password():
+         st.stop()
+
+
+ config = SHConfig()
+ config.instance_id = '6c220beb-90c4-4131-b658-10cddd8d97b9'
+ config.sh_client_id = '17e7c154-7f2d-4139-b1af-cef762385079'
+ config.sh_client_secret = 'KvbQMKZB85ZWEgWuxqiWIVEvTAQEfoF9'
+
+
+ def select_field(gdf):
+     names = gdf['name'].tolist()
+     names.append("Select Field")
+     field_name = st.selectbox("Select Field", options=names, key="field_name_monitor", help="Select the field to edit", index=len(names)-1)
+     return field_name
+
+
+ def calculate_bbox(df, field):
+     bbox = df.loc[df['name'] == field].bounds
+     r = bbox.iloc[0]
+     return [r.minx, r.miny, r.maxx, r.maxy]
+
+ def get_available_dates_for_field(df, field, year, start_date='', end_date=''):
+     bbox = calculate_bbox(df, field)
+     token = SenHub(config).token
+     headers = utils.get_bearer_token_headers(token)
+     if start_date == '' or end_date == '':
+         start_date = f'{year}-01-01'
+         end_date = f'{year}-12-31'
+     data = f'{{ "collections": [ "sentinel-2-l2a" ], "datetime": "{start_date}T00:00:00Z/{end_date}T23:59:59Z", "bbox": {bbox}, "limit": 100, "distinct": "date" }}'
+     response = requests.post('https://services.sentinel-hub.com/api/v1/catalog/search', headers=headers, data=data)
+     try:
+         features = response.json()['features']
+     except Exception:
+         print(response.json())
+         features = []
+     return features
+
+ @st.cache_data
+ def get_and_cache_available_dates(_df, field, year, start_date, end_date):
+     dates = get_available_dates_for_field(_df, field, year, start_date, end_date)
+     print(f'Caching Dates for {field}')
+     return dates
+
+
+ def get_cuarted_df_for_field(df, field, date, metric, clientName):
+     curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
+     if curated_date_path is not None:
+         curated_df = gpd.read_file(curated_date_path)
+     else:
+         process.Download_image_in_given_date(clientName, metric, df, field, date)
+         process.mask_downladed_image(clientName, metric, df, field, date)
+         process.convert_maske_image_to_geodataframe(clientName, metric, df, field, date, df.crs)
+         curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
+         curated_df = gpd.read_file(curated_date_path)
+     return curated_df
+
+
+ def track(metric, field_name, src_df, client_name):
+
+     dates = []
+     date = -1
+     if 'dates' not in st.session_state:
+         st.session_state['dates'] = dates
+     else:
+         dates = st.session_state['dates']
+     if 'date' not in st.session_state:
+         st.session_state['date'] = date
+     else:
+         date = st.session_state['date']
+
+     # Give the user the option to select year, start date and end date
+     # with st.expander('Select Year, Start Date and End Date'):
+     #     # Get the year
+     #     years = [f'20{i}' for i in range(22, 25)]
+     #     year = st.selectbox('Select Year: ', years, index=len(years)-2, key=f'Select Year Dropdown Menu - {metric}')
+
+     #     # Set the min, max and default values for start and end dates
+     #     min_val = f'{year}-01-01'
+     #     max_val = f'{year}-12-31'
+     #     default_val = f'{year}-11-01'
+     #     min_val = datetime.strptime(min_val, '%Y-%m-%d')
+     #     max_val = datetime.strptime(max_val, '%Y-%m-%d')
+     #     default_val = datetime.strptime(default_val, '%Y-%m-%d')
+
+     #     # Get the start and end dates
+     #     start_date = st.date_input('Start Date', value=default_val, min_value=min_val, max_value=max_val, key=f'Start Date - {metric}')
+     #     end_date = st.date_input('End Date', value=max_val, min_value=min_val, max_value=max_val, key=f'End Date - {metric}')
+
+     # Get the dates with available data for that field when the user clicks the button
+     # get_dates_button = st.button(f'Get Dates for Field {field_name} (Field ID: {field_name}) in {year} (from {start_date} to {end_date})',
+     #                              key=f'Get Dates Button - {metric}',
+     #                              help='Click to get the dates with available data for the selected field',
+     #                              use_container_width=True, type='primary')
+     # if get_dates_button:
+     if True:
+         start_date = '2024-01-01'
+         today = datetime.today()
+         end_date = today.strftime('%Y-%m-%d')
+         year = '2024'
+
+         dates = get_and_cache_available_dates(src_df, field_name, year, start_date, end_date)
+         # Add None to the end of the list to be used as a default value
+         # dates.append(-1)
+         # Sort the dates from earliest to today
+         dates = sorted(dates)
+
+         # Add the dates to the session state
+         st.session_state['dates'] = dates
+
+         # Display the dropdown menu
+         if len(dates) > 0:
+             date = st.selectbox('Select Observation Date: ', dates, index=len(dates)-1, key=f'Select Date Dropdown Menu - {metric}')
+             if date != -1:
+                 st.write('You selected:', date)
+                 # Add the date to the session state
+                 st.session_state['date'] = date
+             else:
+                 st.write('Please Select A Date')
+         else:
+             st.info('No dates available for the selected field and date range; select a different range or click the button to fetch the dates again')
+
+     st.markdown('---')
+     st.header('Show Field Data')
+
+     # If a field and a date are selected, display the field data
+     if date != -1:
+
+         # Get the field data at the selected date
+         with st.spinner('Loading Field Data...'):
+             # Get the metric data and cloud cover data for the selected field and date
+             metric_data = get_cuarted_df_for_field(src_df, field_name, date, metric, client_name)
+             cloud_cover_data = get_cuarted_df_for_field(src_df, field_name, date, 'CLP', client_name)
+
+             # Merge the metric and cloud cover data on the geometry column
+             field_data = metric_data.merge(cloud_cover_data, on='geometry')
+
+         # Display the field data
+         st.write(f'Field Data for {field_name} (Field ID: {field_name}) on {date}')
+         st.write(field_data.head(2))
+
+         # Get the average cloud cover
+         avg_clp = field_data[f'CLP_{date}'].mean() * 100
+
+         # If the average cloud cover is greater than 80%, display a warning message
+         if avg_clp > 80:
+             st.warning(f'⚠️ The Average Cloud Cover is {avg_clp}%')
+             st.info('Please Select A Different Date')
+
+         ## Generate the field data map ##
+
+         # Title, colormap and legend
+         title = f'{metric} for selected field {field_name} (Field ID: {field_name}) in {date}'
+         cmap = 'RdYlGn'
+
+         # Create a map of the field data
+         field_data_map = field_data.explore(
+             column=f'{metric}_{date}',
+             cmap=cmap,
+             legend=True,
+             vmin=0,
+             vmax=1,
+             marker_type='circle', marker_kwds={'radius': 5.3, 'fill': True})
+
+         # Add Google Satellite as a base map
+         google_map = utils.basemaps['Google Satellite']
+         google_map.add_to(field_data_map)
+
+         # Display the map
+         st_folium(field_data_map, width=725, key=f'Field Data Map - {metric}')
+
+         # Download links
+
+         # If the field data is not empty, display the download links
+         if len(field_data) > 0:
+             # Create two columns for the download links
+             download_as_shp_col, download_as_tiff_col = st.columns(2)
+
+             # Create a shapefile of the field data and add a download link
+             with download_as_shp_col:
+
+                 # Set the shapefile name and path based on the field id, metric and date
+                 extension = 'shp'
+                 shapefilename = f"{field_name}_{metric}_{date}.{extension}"
+                 path = f'./shapefiles/{field_name}/{metric}/{extension}'
+
+                 # Create the target directory if it doesn't exist
+                 os.makedirs(path, exist_ok=True)
+
+                 # Save the field data as a shapefile
+                 field_data.to_file(f'{path}/{shapefilename}')
+
+                 # Create a zip file of the shapefile
+                 files = []
+                 for i in os.listdir(path):
+                     if os.path.isfile(os.path.join(path, i)):
+                         if i[0:len(shapefilename)] == shapefilename:
+                             files.append(os.path.join(path, i))
+                 zipFileName = f'{path}/{field_name}_{metric}_{date}.zip'
+                 zipObj = ZipFile(zipFileName, 'w')
+                 for file in files:
+                     zipObj.write(file)
+                 zipObj.close()
+
+                 # Add a download link for the zip file
+                 with open(zipFileName, 'rb') as f:
+                     st.download_button('Download as ShapeFile', f, file_name=zipFileName)
+
+             # Get the tiff file path and create a download link
+             with download_as_tiff_col:
+                 # Get the tiff file path
+                 tiff_path = utils.get_masked_location_img_path(client_name, metric, date, field_name)
+                 # Add a download link for the tiff file
+                 download_filename = f'{metric}_{field_name}_{date}.tiff'
+                 with open(tiff_path, 'rb') as f:
+                     st.download_button('Download as Tiff File', f, file_name=download_filename)
+
+     else:
+         st.info('Please Select A Field and A Date')
+
+     # st.markdown('---')
+     # st.header('Show Historic Averages')
+
+     # # Let the user select the year, start date and end date
+     # with st.expander('Select Year, Start Date and End Date'):
+     #     # Get the year
+     #     years = [f'20{i}' for i in range(22, 25)]
+     #     year = st.selectbox('Select Year: ', years, index=len(years)-2, key=f'Select Year Dropdown Menu - {metric}- Historic Averages')
+
+     #     # Set the start and end dates to the first and last dates of the year
+     #     start_date = f'{year}-01-01'
+     #     end_date = f'{year}-12-31'
+
+     # # Get the dates for historic averages
+     # historic_avarages_dates_for_field = get_and_cache_available_dates(src_df, field_name, year, start_date, end_date)
+
+     # # Convert the dates to datetime objects, sort them in ascending order, then convert them back to strings
+     # historic_avarages_dates_for_field = [datetime.strptime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
+     # historic_avarages_dates_for_field.sort()
+     # historic_avarages_dates_for_field = [datetime.strftime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
+
+     # # Get the number of dates
+     # num_historic_dates = len(historic_avarages_dates_for_field)
+     # st.write(f' Found {num_historic_dates} dates for field {field_name} in {year} (from {start_date} to {end_date})')
+
+     # # Display the historic averages when the user clicks the button
+     # display_historic_avgs_button = st.button(f'Display Historic Averages for Field {field_name} (Field ID: {field_name}) in {year} (from {start_date} to {end_date})',
+     #                                          key=f'Display Historic Averages Button - {metric}',
+     #                                          help='Click to display the historic averages for the selected field',
+     #                                          use_container_width=True, type='primary')
+
+     # # If the button is clicked, display the historic averages
+     # if display_historic_avgs_button:
+
+     #     # Initialize the historic averages cache dir and file paths
+     #     historic_avarages_cache_dir = './historic_avarages_cache'
+     #     historic_avarages_cache_path = f'{historic_avarages_cache_dir}/historic_avarages_cache.joblib'
+     #     historic_avarages_cache_clp_path = f'{historic_avarages_cache_dir}/historic_avarages_cache_clp.joblib'
+
+     #     # Load the historic averages cache if it exists, else create it
+     #     if os.path.exists(historic_avarages_cache_path):
+     #         historic_avarages_cache = joblib.load(historic_avarages_cache_path)
+     #     else:
+     #         os.makedirs(historic_avarages_cache_dir, exist_ok=True)
+     #         joblib.dump({}, historic_avarages_cache_path)
+     #         historic_avarages_cache = joblib.load(historic_avarages_cache_path)
+     #     if os.path.exists(historic_avarages_cache_clp_path):
+     #         historic_avarages_cache_clp = joblib.load(historic_avarages_cache_clp_path)
+     #     else:
+     #         os.makedirs(historic_avarages_cache_dir, exist_ok=True)
+     #         joblib.dump({}, historic_avarages_cache_clp_path)
+     #         historic_avarages_cache_clp = joblib.load(historic_avarages_cache_clp_path)
+
+     #     # Check if the field and year are in the cache for the current metric and client
+     #     found_in_cache = False
+     #     if client_name not in historic_avarages_cache:
+     #         historic_avarages_cache[client_name] = {}
+     #     if metric not in historic_avarages_cache[client_name]:
+     #         historic_avarages_cache[client_name][metric] = {}
+     #     if field_name not in historic_avarages_cache[client_name][metric]:
+     #         historic_avarages_cache[client_name][metric][field_name] = {}
+     #     if year not in historic_avarages_cache[client_name][metric][field_name]:
+     #         historic_avarages_cache[client_name][metric][field_name][year] = {}
+     #     if len(historic_avarages_cache[client_name][metric][field_name][year]) > 0:
+     #         found_in_cache = True
+
+     #     # Check if the field and year are in the cache_clp for the current metric and client
+     #     found_in_cache_clp = False
+     #     if client_name not in historic_avarages_cache_clp:
+     #         historic_avarages_cache_clp[client_name] = {}
+     #     if 'CLP' not in historic_avarages_cache_clp[client_name]:
+     #         historic_avarages_cache_clp[client_name]['CLP'] = {}
+     #     if field_name not in historic_avarages_cache_clp[client_name]['CLP']:
+     #         historic_avarages_cache_clp[client_name]['CLP'][field_name] = {}
+     #     if year not in historic_avarages_cache_clp[client_name]['CLP'][field_name]:
+     #         historic_avarages_cache_clp[client_name]['CLP'][field_name][year] = {}
+     #     if len(historic_avarages_cache_clp[client_name]['CLP'][field_name][year]) > 0:
+     #         found_in_cache_clp = True
+
+     #     # If found in cache, get the historic averages from the cache
+     #     if found_in_cache and found_in_cache_clp:
+     #         st.info('Found Historic Averages in Cache')
+     #         historic_avarages = historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages']
+     #         historic_avarages_dates = historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages_dates']
+     #         historic_avarages_clp = historic_avarages_cache_clp[client_name]['CLP'][field_name][year]['historic_avarages_clp']
+
+     #     # Else, calculate the historic averages and add them to the cache
+     #     else:
+     #         st.info('Calculating Historic Averages...')
+
+     #         # Empty lists for the historic averages, dates and cloud cover
+     #         historic_avarages = []
+     #         historic_avarages_dates = []
+     #         historic_avarages_clp = []
+
+     #         # Get the historic averages
+     #         dates_for_field_bar = st.progress(0)
+     #         with st.spinner('Calculating Historic Averages...'):
+     #             with st.empty():
+     #                 for i in range(num_historic_dates):
+     #                     # Get the historic average for the current date
+     #                     current_date = historic_avarages_dates_for_field[i]
+     #                     current_df = get_cuarted_df_for_field(src_df, field_name, current_date, metric, client_name)
+     #                     current_df_clp = get_cuarted_df_for_field(src_df, field_name, current_date, 'CLP', client_name)
+     #                     current_avg = current_df[f'{metric}_{current_date}'].mean()
+     #                     current_avg_clp = current_df_clp[f'CLP_{current_date}'].mean()
+     #                     # Add the historic average and date to the lists
+     #                     historic_avarages.append(current_avg)
+     #                     historic_avarages_dates.append(current_date)
+     #                     historic_avarages_clp.append(current_avg_clp)
+     #                     # Update the progress bar
+     #                     dates_for_field_bar.progress((i + 1)/(num_historic_dates))
+
+     #     # Create a plot of the historic averages with the cloud cover as a dashed line and dates on the x axis (rotated 90 degrees when needed)
+     #     fig, ax = plt.subplots(figsize=(5, 3))
+
+     #     # Set the x axis ticks and labels
+     #     x = historic_avarages_dates
+     #     x_ticks = [i for i in range(len(x))]
+     #     ax.set_xticks(x_ticks)
+
+     #     # Set rotation to 90 degrees if the number of dates is greater than 10
+     #     rot = 0 if len(x) < 10 else 90
+     #     ax.set_xticklabels(x, rotation=rot)
+
+     #     # Set the y axis ticks and labels
+     #     y1 = historic_avarages
+     #     y2 = historic_avarages_clp
+     #     y_ticks = [i/10 for i in range(11)]
+     #     ax.set_yticks(y_ticks)
+     #     ax.set_yticklabels(y_ticks)
+
+     #     # Plot the historic averages and cloud cover
+     #     ax.plot(x_ticks, y1, label=f'{metric} Historic Averages')
+     #     ax.plot(x_ticks, y2, '--', label='Cloud Cover')
+     #     ax.legend()
+
+     #     # Set the title and axis labels
+     #     ax.set_title(f'{metric} Historic Averages for {field_name} (Field ID: {field_name}) in {year}')
+     #     ax.set_xlabel('Date')
+     #     ax.set_ylabel(f'{metric} Historic Averages')
+
+     #     # Display the plot
+     #     st.pyplot(fig, use_container_width=True)
+
+     #     # Add the historic averages to the cache
+     #     historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages'] = historic_avarages
+     #     historic_avarages_cache[client_name][metric][field_name][year]['historic_avarages_dates'] = historic_avarages_dates
+     #     historic_avarages_cache_clp[client_name]['CLP'][field_name][year]['historic_avarages_clp'] = historic_avarages_clp
+     #     # Save the cache
+     #     joblib.dump(historic_avarages_cache, historic_avarages_cache_path)
+     #     joblib.dump(historic_avarages_cache_clp, historic_avarages_cache_clp_path)
+     #     # Tell the user that the historic averages are saved in the cache
+     #     st.info('Historic Averages Saved in Cache')
+     #     st.write(f'Cache Path: {historic_avarages_cache_path}')
+     #     st.write(f'Cache CLP Path: {historic_avarages_cache_clp_path}')
+
+     #     # Display the historic averages in a Plotly plot
+     #     fig = make_subplots(specs=[[{"secondary_y": True}]])
+
+     #     # Add the historic averages to the plot
+     #     fig.add_trace(
+     #         go.Scatter(x=historic_avarages_dates, y=historic_avarages, name=f'{metric} Historic Averages'),
+     #         secondary_y=False,
+     #     )
+
+     #     # Add the cloud cover to the plot
+     #     fig.add_trace(
+     #         go.Scatter(x=historic_avarages_dates, y=historic_avarages_clp, name='Cloud Cover'),
+     #         secondary_y=True,
+     #     )
+
+     #     # Set the title and axis labels
+     #     fig.update_layout(title_text=f'{metric} Historic Averages for {field_name} (Field ID: {field_name}) in {year}')
+     #     fig.update_xaxes(title_text='Date')
+     #     fig.update_yaxes(title_text=f'{metric} Historic Averages', secondary_y=False)
+     #     fig.update_yaxes(title_text='Cloud Cover', secondary_y=True)
+
+     #     # Display the plot
+     #     st.plotly_chart(fig)
+
+
+     # st.markdown('---')
+     # st.header('Show Historic GIF')
+
+
+     # # Let the user select the year, start date and end date of the GIF
+     # with st.expander('Select Year, Start Date and End Date of the GIF'):
+     #     # Get the year
+     #     years = [f'20{i}' for i in range(16, 23)]
+     #     year = st.selectbox('Select Year: ', years, index=len(years)-2, key=f'Select Year Dropdown Menu - {metric}- Historic Averages GIF')
+
+     #     # Set the start and end dates to the first and last dates of the year
+     #     start_date = f'{year}-01-01'
+     #     end_date = f'{year}-12-31'
+
+     # # Get the dates for the historic GIF
+     # historic_avarages_dates_for_field = get_and_cache_available_dates(src_df, field_name, year, start_date, end_date)
+
+     # # Convert the dates to datetime objects, sort them in ascending order, then convert them back to strings
+     # historic_avarages_dates_for_field = [datetime.strptime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
+     # historic_avarages_dates_for_field.sort()
+     # historic_avarages_dates_for_field = [datetime.strftime(date, '%Y-%m-%d') for date in historic_avarages_dates_for_field]
+
+     # # Get the number of dates
+     # num_historic_dates = len(historic_avarages_dates_for_field)
+     # st.write(f' Found {num_historic_dates} dates for field {field_name} in {year} (from {start_date} to {end_date})')
+
+     # # Display the historic GIF when the user clicks the button
+     # display_historic_GIF_button = st.button(f'Display Historic GIF for Field {field_name} (Field ID: {field_name}) in {year} (from {start_date} to {end_date})',
+     #                                         key=f'Display Historic GIF Button - {metric}',
+     #                                         help='Click to display the historic GIF for the selected field',
+     #                                         use_container_width=True, type='primary')
+
+     # # If the button is clicked, display the historic GIF
+     # if display_historic_GIF_button:
+
+     #     # Initialize the historic GIF imgs and dates
+     #     st.info('Generating Historic GIF...')
+     #     historic_imgs = []
+     #     historic_imgs_dates = []
+
+     #     # Generate the historic GIF frames
+     #     dates_for_field_bar = st.progress(0)
+     #     with st.spinner('Generating Historic GIF...'):
+     #         with st.empty():
+     #             for i in range(num_historic_dates):
+     #                 current_date = historic_avarages_dates_for_field[i]
+     #                 current_df = get_cuarted_df_for_field(src_df, field_name, current_date, metric, client_name)
+     #                 historic_imgs.append(current_df)
+     #                 historic_imgs_dates.append(current_date)
+     #                 dates_for_field_bar.progress((i + 1)/(num_historic_dates))
+
+     #                 # Create a figure of the historic image
+     #                 fig, ax = plt.subplots(figsize=(10, 5))
+
+     #                 # Get the current image
+     #                 current_df_lat_lon = utils.add_lat_lon_to_gdf_from_geometry(current_df)
+     #                 current_img = utils.gdf_column_to_one_band_array(current_df_lat_lon, f'{metric}_{current_date}')
+
+     #                 # Plot the historic image
+     #                 title = f'{metric} for selected field {field_name} (Field ID: {field_name}) in {current_date}'
+     #                 ax.imshow(current_img)
+     #                 ax.set_title(title)
+
+     #                 # Display the plot
+     #                 st.pyplot(fig)
+
+     #     # Create the historic GIF
+     #     historic_GIF_name = f'{metric}_{field_name}_{year}.gif'
+     #     st.write('Creating Historic GIF...', historic_GIF_name)
+
+
+ def monitor_fields():
+     current_user = greeting("Let's take a look how these fields are doing")
+     if os.path.exists(f"fields_{current_user}.parquet"):
+         gdf = gpd.read_parquet(f"fields_{current_user}.parquet")
+     else:
+         st.info("No Fields Added Yet!")
+         return
+     # st.info("Hover over the field to show the properties or check the Existing Fields List below")
+     # fields_map = gdf.explore()
+     # sat_basemap = utils.basemaps['Google Satellite']
+     # sat_basemap.add_to(fields_map)
+     # folium.LayerControl().add_to(fields_map)
+     # # output = st_folium(fields_map, key="edit_map", height=300, width=600)
+     # folium_static(fields_map, height=300, width=600)
+
+     with st.expander("Existing Fields List", expanded=False):
+         st.write(gdf)
+
+     field_name = select_field(gdf)
+     if field_name == "Select Field":
+         st.info("No Field Selected Yet!")
+     else:
+         with st.expander("Metrics Explanation", expanded=False):
+             st.write("NDVI: Normalized Difference Vegetation Index, mainly used to monitor the health of vegetation")
+             st.write("LAI: Leaf Area Index, mainly used to monitor the productivity of vegetation")
+             st.write("CAB: Chlorophyll a+b content, mainly used to monitor the chlorophyll content in vegetation")
+             # st.write("NDMI: Normalized Difference Moisture Index, mainly used to monitor the moisture content in vegetation")
+             st.success("More metrics and analysis features will be added soon")
+         metric = st.radio("Select Metric to Monitor", ["NDVI", "LAI", "CAB"], key="metric", index=0, help="Select the metric to monitor")
+         st.write(f"Monitoring {metric} for {field_name}")
+
+         track(metric, field_name, gdf, current_user)
+
+
+ if __name__ == '__main__':
+     check_authentication()
+     monitor_fields()
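`get_available_dates_for_field` above builds its Catalog API request by hand, which makes it easy to reproduce outside Streamlit when debugging date availability. A minimal sketch, assuming valid credentials in `SHConfig` (the token comes from the repo's own `SenHub` wrapper) and a hand-written bbox in place of `calculate_bbox`:

import requests
from sentinelhub import SHConfig
from senHub import SenHub
import utils

config = SHConfig()  # expects instance_id / sh_client_id / sh_client_secret to be set
token = SenHub(config).token
headers = utils.get_bearer_token_headers(token)

payload = {
    "collections": ["sentinel-2-l2a"],
    "datetime": "2024-01-01T00:00:00Z/2024-12-31T23:59:59Z",
    "bbox": [32.58, 15.21, 32.73, 15.62],  # hypothetical field bounds, lon/lat
    "limit": 100,
    "distinct": "date",
}
resp = requests.post('https://services.sentinel-hub.com/api/v1/catalog/search',
                     headers=headers, json=payload)
# With "distinct": "date", 'features' is a plain list of 'YYYY-MM-DD' strings
print(resp.json().get('features', []))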
playground.ipynb ADDED
@@ -0,0 +1,155 @@
+ {
+  "cells": [
+   {
+    "cell_type": "code",
+    "execution_count": null,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "import folium\n",
+     "from folium.plugins import Draw\n",
+     "\n",
+     "center_start = [15.572363674301132, 32.69167103104079]\n",
+     "zoom_start = 13\n",
+     "\n",
+     "m = folium.Map(location=center_start, zoom_start=zoom_start)\n",
+     "\n",
+     "draw_options = {'polyline': False, 'polygon': True, 'rectangle': True, 'circle': True, 'marker': False, 'circlemarker': False}\n",
+     "Draw(export=True, draw_options=draw_options).add_to(m)\n",
+     "m"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 17,
+    "metadata": {},
+    "outputs": [
+     {
+      "name": "stdout",
+      "output_type": "stream",
+      "text": [
+       "28579648969.878418\n",
+       "28579.648969878417\n"
+      ]
+     },
+     {
+      "name": "stderr",
+      "output_type": "stream",
+      "text": [
+       "c:\\Users\\Edin\\anaconda3\\envs\\amazon\\lib\\site-packages\\pyproj\\crs\\crs.py:141: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method. When making the change, be mindful of axis order changes: https://pyproj4.github.io/pyproj/stable/gotchas.html#axis-order-changes-in-proj-6\n",
+       "  in_crs_string = _prepare_from_proj_string(in_crs_string)\n",
+       "c:\\Users\\Edin\\anaconda3\\envs\\amazon\\lib\\site-packages\\shapely\\ops.py:276: FutureWarning: This function is deprecated. See: https://pyproj4.github.io/pyproj/stable/gotchas.html#upgrading-to-pyproj-2-from-pyproj-1\n",
+       "  shell = type(geom.exterior)(zip(*func(*zip(*geom.exterior.coords))))\n"
+      ]
+     }
+    ],
+    "source": [
+     "from shapely.geometry import Polygon\n",
+     "import shapely.ops as ops\n",
+     "from functools import partial\n",
+     "import pyproj\n",
+     "edges = [\n",
+     "    [32.584179, 15.217575],\n",
+     "    [32.726973, 13.546143],\n",
+     "    [33.957199, 13.973163],\n",
+     "    [34.286724, 14.739791],\n",
+     "    [33.69358, 15.620197],\n",
+     "    [32.584179, 15.217575]\n",
+     "]\n",
+     "geom = Polygon(edges)\n",
+     "\n",
+     "geom_area = ops.transform(\n",
+     "    partial(\n",
+     "        pyproj.transform,\n",
+     "        pyproj.Proj(init='EPSG:4326'),\n",
+     "        pyproj.Proj(\n",
+     "            proj='aea',\n",
+     "            lat_1=geom.bounds[1],\n",
+     "            lat_2=geom.bounds[3]\n",
+     "        )\n",
+     "    ),\n",
+     "    geom)\n",
+     "\n",
+     "# Print the area in m^2\n",
+     "print(geom_area.area)\n",
+     "# Print the area in km^2\n",
+     "print(geom_area.area/1000000)"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": null,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "geom"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": null,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "m"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": null,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "import pyproj\n",
+     "import shapely\n",
+     "import shapely.ops as ops\n",
+     "from shapely.geometry.polygon import Polygon\n",
+     "from functools import partial\n",
+     "\n",
+     "geom = Polygon([(0, 0), (0, 10), (10, 10), (10, 0), (0, 0)])\n",
+     "geom_area = ops.transform(\n",
+     "    partial(\n",
+     "        pyproj.transform,\n",
+     "        pyproj.Proj(init='EPSG:4326'),\n",
+     "        pyproj.Proj(\n",
+     "            proj='aea',\n",
+     "            lat_1=geom.bounds[1],\n",
+     "            lat_2=geom.bounds[3]\n",
+     "        )\n",
+     "    ),\n",
+     "    geom)\n",
+     "\n",
+     "# Print the area in m^2\n",
+     "print(geom_area.area)"
+    ]
+   }
+  ],
+  "metadata": {
+   "kernelspec": {
+    "display_name": "amazon",
+    "language": "python",
+    "name": "python3"
+   },
+   "language_info": {
+    "codemirror_mode": {
+     "name": "ipython",
+     "version": 3
+    },
+    "file_extension": ".py",
+    "mimetype": "text/x-python",
+    "name": "python",
+    "nbconvert_exporter": "python",
+    "pygments_lexer": "ipython3",
+    "version": "3.9.16"
+   }
+  },
+  "nbformat": 4,
+  "nbformat_minor": 2
+ }
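The notebook's area computation is what triggers the FutureWarnings captured in its output: `pyproj.Proj(init=...)` and `pyproj.transform` are deprecated. The same geodesic area can be computed directly with `pyproj.Geod`, avoiding the projection round-trip entirely; a minimal sketch on the notebook's own polygon:

from pyproj import Geod
from shapely.geometry import Polygon

geom = Polygon([
    (32.584179, 15.217575),
    (32.726973, 13.546143),
    (33.957199, 13.973163),
    (34.286724, 14.739791),
    (33.69358, 15.620197),
])
# geometry_area_perimeter returns a signed area in m^2 on the ellipsoid
area_m2, _perimeter = Geod(ellps="WGS84").geometry_area_perimeter(geom)
print(abs(area_m2))        # m^2
print(abs(area_m2) / 1e6)  # km^2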
process.py ADDED
@@ -0,0 +1,45 @@
+ import os
+ import utils
+ import rioxarray as rx
+ from senHub import SenHub
+ from sentinelhub import SHConfig, MimeType
+
+
+ config = SHConfig()
+ config.instance_id = '6c220beb-90c4-4131-b658-10cddd8d97b9'
+ config.sh_client_id = '17e7c154-7f2d-4139-b1af-cef762385079'
+ config.sh_client_secret = 'KvbQMKZB85ZWEgWuxqiWIVEvTAQEfoF9'
+
+ def Download_image_in_given_date(clientName, metric, df, field, date, mime_type=MimeType.TIFF):
+     sen_obj = SenHub(config, mime_type=mime_type)
+     download_path = f'./{clientName}/raw/{metric}/{date}/field_{field}/'
+     bbox = utils.calculate_bbox(df, field)
+     evalscript = utils.Scripts[metric]
+     sen_obj.set_dir(download_path)
+     sen_obj.make_bbox(bbox)
+     sen_obj.make_request(evalscript, date)
+     data = sen_obj.download_data()
+     return data
+
+ def mask_downladed_image(clientName, metric, df, field, date):
+     download_path = utils.get_downloaded_location_img_path(clientName, metric, date, field)
+     im = rx.open_rasterio(download_path)
+     field_vals = df.loc[df['name'] == field]
+     field_geom = field_vals.geometry
+     crs = field_vals.crs
+     clipped = im.rio.clip(field_geom, crs, drop=True)
+     save_dir_path = f'./{clientName}/processed/{metric}/{date}/field_{field}/'
+     os.makedirs(save_dir_path, exist_ok=True)
+     save_tiff_path = save_dir_path + 'masked.tiff'
+     clipped.rio.to_raster(save_tiff_path)
+     return save_tiff_path
+
+ def convert_maske_image_to_geodataframe(clientName, metric, df, field, date, crs):
+     imagePath = utils.get_masked_location_img_path(clientName, metric, date, field)
+     im = rx.open_rasterio(imagePath)
+     gdf = utils.tiff_to_geodataframe(im, metric, date, crs)
+     save_dir_path = f'./{clientName}/curated/{metric}/{date}/field_{field}/'
+     os.makedirs(save_dir_path, exist_ok=True)
+     save_geojson_path = save_dir_path + 'masked.geojson'
+     gdf.to_file(save_geojson_path, driver='GeoJSON')
+     return save_geojson_path
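These three functions form the raw → processed → curated pipeline that `pag/monitor.py` drives through `get_cuarted_df_for_field`. A minimal end-to-end sketch, assuming `df` is a user's fields GeoDataFrame and the field/date pair has imagery (the file name, field name, date and client below are placeholders, not values from this repo):

import geopandas as gpd
import process

df = gpd.read_parquet('fields_demo.parquet')   # hypothetical per-user fields file
field, date, metric, client = 'field_1', '2024-03-15', 'NDVI', 'demo'

process.Download_image_in_given_date(client, metric, df, field, date)  # raw tiff from Sentinel Hub
process.mask_downladed_image(client, metric, df, field, date)          # clipped to the field geometry
geojson_path = process.convert_maske_image_to_geodataframe(client, metric, df, field, date, df.crs)

curated = gpd.read_file(geojson_path)  # one row per pixel, value column named f'{metric}_{date}'
print(curated[f'{metric}_{date}'].mean())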
requirements.txt ADDED
@@ -0,0 +1,11 @@
+ confuse==2.0.1
+ folium==0.14.0
+ geopandas==0.14.3
+ mapclassify==2.5.0
+ matplotlib==3.7.1
+ plotly==5.14.1
+ rioxarray==0.14.1
+ sentinelhub==3.9.1
+ shapely==2.0.3
+ streamlit-folium==0.12.0
+ streamlit==1.29.0
scripts/cab.js ADDED
@@ -0,0 +1,175 @@
+ //VERSION=3 (auto-converted from 2)
+ var degToRad = Math.PI / 180;
+
+ function evaluatePixelOrig(samples) {
+   var sample = samples[0];
+   var b03_norm = normalize(sample.B03, 0, 0.253061520471542);
+   var b04_norm = normalize(sample.B04, 0, 0.290393577911328);
+   var b05_norm = normalize(sample.B05, 0, 0.305398915248555);
+   var b06_norm = normalize(sample.B06, 0.006637972542253, 0.608900395797889);
+   var b07_norm = normalize(sample.B07, 0.013972727018939, 0.753827384322927);
+   var b8a_norm = normalize(sample.B8A, 0.026690138082061, 0.782011770669178);
+   var b11_norm = normalize(sample.B11, 0.016388074192258, 0.493761397883092);
+   var b12_norm = normalize(sample.B12, 0, 0.493025984460231);
+   var viewZen_norm = normalize(Math.cos(sample.viewZenithMean * degToRad), 0.918595400582046, 1);
+   var sunZen_norm = normalize(Math.cos(sample.sunZenithAngles * degToRad), 0.342022871159208, 0.936206429175402);
+   var relAzim_norm = Math.cos((sample.sunAzimuthAngles - sample.viewAzimuthMean) * degToRad);
+
+   var n1 = neuron1(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm);
+   var n2 = neuron2(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm);
+   var n3 = neuron3(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm);
+   var n4 = neuron4(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm);
+   var n5 = neuron5(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm);
+
+   var l2 = layer2(n1, n2, n3, n4, n5);
+
+   var cab = denormalize(l2, 0.007426692959872, 873.908222110306);
+   return {
+     default: [cab / 300]
+   }
+ }
+
+ function neuron1(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm) {
+   var sum =
+     4.242299670155190
+     + 0.400396555256580 * b03_norm
+     + 0.607936279259404 * b04_norm
+     + 0.137468650780226 * b05_norm
+     - 2.955866573461640 * b06_norm
+     - 3.186746687729570 * b07_norm
+     + 2.206800751246430 * b8a_norm
+     - 0.313784336139636 * b11_norm
+     + 0.256063547510639 * b12_norm
+     - 0.071613219805105 * viewZen_norm
+     + 0.510113504210111 * sunZen_norm
+     + 0.142813982138661 * relAzim_norm;
+
+   return tansig(sum);
+ }
+
+ function neuron2(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm) {
+   var sum =
+     - 0.259569088225796
+     - 0.250781102414872 * b03_norm
+     + 0.439086302920381 * b04_norm
+     - 1.160590937522300 * b05_norm
+     - 1.861935250269610 * b06_norm
+     + 0.981359868451638 * b07_norm
+     + 1.634230834254840 * b8a_norm
+     - 0.872527934645577 * b11_norm
+     + 0.448240475035072 * b12_norm
+     + 0.037078083501217 * viewZen_norm
+     + 0.030044189670404 * sunZen_norm
+     + 0.005956686619403 * relAzim_norm;
+
+   return tansig(sum);
+ }
+
+ function neuron3(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm) {
+   var sum =
+     + 3.130392627338360
+     + 0.552080132568747 * b03_norm
+     - 0.502919673166901 * b04_norm
+     + 6.105041924966230 * b05_norm
+     - 1.294386119140800 * b06_norm
+     - 1.059956388352800 * b07_norm
+     - 1.394092902418820 * b8a_norm
+     + 0.324752732710706 * b11_norm
+     - 1.758871822827680 * b12_norm
+     - 0.036663679860328 * viewZen_norm
+     - 0.183105291400739 * sunZen_norm
+     - 0.038145312117381 * relAzim_norm;
+
+   return tansig(sum);
+ }
+
+ function neuron4(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm) {
+   var sum =
+     + 0.774423577181620
+     + 0.211591184882422 * b03_norm
+     - 0.248788896074327 * b04_norm
+     + 0.887151598039092 * b05_norm
+     + 1.143675895571410 * b06_norm
+     - 0.753968830338323 * b07_norm
+     - 1.185456953076760 * b8a_norm
+     + 0.541897860471577 * b11_norm
+     - 0.252685834607768 * b12_norm
+     - 0.023414901078143 * viewZen_norm
+     - 0.046022503549557 * sunZen_norm
+     - 0.006570284080657 * relAzim_norm;
+
+   return tansig(sum);
+ }
+
+ function neuron5(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm) {
+   var sum =
+     + 2.584276648534610
+     + 0.254790234231378 * b03_norm
+     - 0.724968611431065 * b04_norm
+     + 0.731872806026834 * b05_norm
+     + 2.303453821021270 * b06_norm
+     - 0.849907966921912 * b07_norm
+     - 6.425315500537270 * b8a_norm
+     + 2.238844558459030 * b11_norm
+     - 0.199937574297990 * b12_norm
+     + 0.097303331714567 * viewZen_norm
+     + 0.334528254938326 * sunZen_norm
+     + 0.113075306591838 * relAzim_norm;
+
+   return tansig(sum);
+ }
+
+ function layer2(neuron1, neuron2, neuron3, neuron4, neuron5) {
+   var sum =
+     + 0.463426463933822
+     - 0.352760040599190 * neuron1
+     - 0.603407399151276 * neuron2
+     + 0.135099379384275 * neuron3
+     - 1.735673123851930 * neuron4
+     - 0.147546813318256 * neuron5;
+
+   return sum;
+ }
+
+ function normalize(unnormalized, min, max) {
+   return 2 * (unnormalized - min) / (max - min) - 1;
+ }
+ function denormalize(normalized, min, max) {
+   return 0.5 * (normalized + 1) * (max - min) + min;
+ }
+ function tansig(input) {
+   return 2 / (1 + Math.exp(-2 * input)) - 1;
+ }
+
+ function setup() {
+   return {
+     input: [{
+       bands: [
+         "B03",
+         "B04",
+         "B05",
+         "B06",
+         "B07",
+         "B8A",
+         "B11",
+         "B12",
+         "viewZenithMean",
+         "viewAzimuthMean",
+         "sunZenithAngles",
+         "sunAzimuthAngles"
+       ]
+     }],
+     output: [
+       {
+         id: "default",
+         sampleType: "FLOAT32",
+         bands: 1
+       }
+     ]
+   }
+ }
+
+ function evaluatePixel(sample, scene, metadata, customData, outputMetadata) {
+   const result = evaluatePixelOrig([sample], [scene], metadata, customData, outputMetadata);
+   return result[Object.keys(result)[0]];
+ }
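cab.js (like lai.js and fcover.js below) is a single-hidden-layer neural network: eight normalized band reflectances plus three normalized viewing/illumination angles feed five tansig neurons, a linear output layer, and a denormalization step. A Python transliteration of the shared helpers, useful for checking the evalscript math offline (the coefficients are the long literals in each neuron function and are not repeated here):

import math

def normalize(x, lo, hi):
    # map [lo, hi] -> [-1, 1], as in the evalscripts
    return 2 * (x - lo) / (hi - lo) - 1

def denormalize(y, lo, hi):
    # map [-1, 1] -> [lo, hi]
    return 0.5 * (y + 1) * (hi - lo) + lo

def tansig(x):
    # hyperbolic-tangent sigmoid used by each hidden neuron
    return 2 / (1 + math.exp(-2 * x)) - 1

def neuron(bias, weights, inputs):
    # one hidden unit: weighted sum plus bias, squashed by tansig
    return tansig(bias + sum(w * v for w, v in zip(weights, inputs)))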
scripts/clp.js ADDED
@@ -0,0 +1,16 @@
+ //VERSION=3
+ function setup() {
+   return {
+     input: ["CLP"],
+     output: [
+       {
+         sampleType: "FLOAT32",
+         bands: 1
+       }
+     ]
+   }
+ }
+
+ function evaluatePixel(sample) {
+   return [sample.CLP / 255];
+ }
scripts/fcover.js ADDED
@@ -0,0 +1,171 @@
+ //VERSION=3
+
+ function setup() {
+   return {
+     input: [{
+       bands: [
+         "B03",
+         "B04",
+         "B05",
+         "B06",
+         "B07",
+         "B8A",
+         "B11",
+         "B12",
+         "viewZenithMean",
+         "viewAzimuthMean",
+         "sunZenithAngles",
+         "sunAzimuthAngles"
+       ],
+       units: ["REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "DEGREES", "DEGREES", "DEGREES", "DEGREES"]
+     }],
+     output: [
+       {
+         id: "default",
+         sampleType: "FLOAT32",
+         bands: 1
+       }
+     ]
+   }
+ }
+
+ var degToRad = Math.PI / 180;
+
+ function evaluatePixel(sample) {
+   var b03_norm = normalize(sample.B03, 0, 0.253061520472);
+   var b04_norm = normalize(sample.B04, 0, 0.290393577911);
+   var b05_norm = normalize(sample.B05, 0, 0.305398915249);
+   var b06_norm = normalize(sample.B06, 0.00663797254225, 0.608900395798);
+   var b07_norm = normalize(sample.B07, 0.0139727270189, 0.753827384323);
+   var b8a_norm = normalize(sample.B8A, 0.0266901380821, 0.782011770669);
+   var b11_norm = normalize(sample.B11, 0.0163880741923, 0.493761397883);
+   var b12_norm = normalize(sample.B12, 0, 0.49302598446);
+   var viewZen_norm = normalize(Math.cos(sample.viewZenithMean * degToRad), 0.918595400582, 0.999999999991);
+   var sunZen_norm = normalize(Math.cos(sample.sunZenithAngles * degToRad), 0.342022871159, 0.936206429175);
+   var relAzim_norm = Math.cos((sample.sunAzimuthAngles - sample.viewAzimuthMean) * degToRad);
+
+   var n1 = neuron1(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm);
+   var n2 = neuron2(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm);
+   var n3 = neuron3(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm);
+   var n4 = neuron4(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm);
+   var n5 = neuron5(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm);
+
+   var l2 = layer2(n1, n2, n3, n4, n5);
+
+   var fcover = denormalize(l2, 0.000181230723879, 0.999638214715);
+
+   return {default: [fcover]}
+ }
+
+ function neuron1(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm) {
+   var sum =
+     - 1.45261652206
+     - 0.156854264841 * b03_norm
+     + 0.124234528462 * b04_norm
+     + 0.235625516229 * b05_norm
+     - 1.8323910258 * b06_norm
+     - 0.217188969888 * b07_norm
+     + 5.06933958064 * b8a_norm
+     - 0.887578008155 * b11_norm
+     - 1.0808468167 * b12_norm
+     - 0.0323167041864 * viewZen_norm
+     - 0.224476137359 * sunZen_norm
+     - 0.195523962947 * relAzim_norm;
+
+   return tansig(sum);
+ }
+
+ function neuron2(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm) {
+   var sum =
+     - 1.70417477557
+     - 0.220824927842 * b03_norm
+     + 1.28595395487 * b04_norm
+     + 0.703139486363 * b05_norm
+     - 1.34481216665 * b06_norm
+     - 1.96881267559 * b07_norm
+     - 1.45444681639 * b8a_norm
+     + 1.02737560043 * b11_norm
+     - 0.12494641532 * b12_norm
+     + 0.0802762437265 * viewZen_norm
+     - 0.198705918577 * sunZen_norm
+     + 0.108527100527 * relAzim_norm;
+
+   return tansig(sum);
+ }
+
+ function neuron3(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm) {
+   var sum =
+     + 1.02168965849
+     - 0.409688743281 * b03_norm
+     + 1.08858884766 * b04_norm
+     + 0.36284522554 * b05_norm
+     + 0.0369390509705 * b06_norm
+     - 0.348012590003 * b07_norm
+     - 2.0035261881 * b8a_norm
+     + 0.0410357601757 * b11_norm
+     + 1.22373853174 * b12_norm
+     - 0.0124082778287 * viewZen_norm
+     - 0.282223364524 * sunZen_norm
+     + 0.0994993117557 * relAzim_norm;
+
+   return tansig(sum);
+ }
+
+ function neuron4(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm) {
+   var sum =
+     - 0.498002810205
+     - 0.188970957866 * b03_norm
+     - 0.0358621840833 * b04_norm
+     + 0.00551248528107 * b05_norm
+     + 1.35391570802 * b06_norm
+     - 0.739689896116 * b07_norm
+     - 2.21719530107 * b8a_norm
+     + 0.313216124198 * b11_norm
+     + 1.5020168915 * b12_norm
+     + 1.21530490195 * viewZen_norm
+     - 0.421938358618 * sunZen_norm
+     + 1.48852484547 * relAzim_norm;
+
+   return tansig(sum);
+ }
+
+ function neuron5(b03_norm, b04_norm, b05_norm, b06_norm, b07_norm, b8a_norm, b11_norm, b12_norm, viewZen_norm, sunZen_norm, relAzim_norm) {
+   var sum =
+     - 3.88922154789
+     + 2.49293993709 * b03_norm
+     - 4.40511331388 * b04_norm
+     - 1.91062012624 * b05_norm
+     - 0.703174115575 * b06_norm
+     - 0.215104721138 * b07_norm
+     - 0.972151494818 * b8a_norm
+     - 0.930752241278 * b11_norm
+     + 1.2143441876 * b12_norm
+     - 0.521665460192 * viewZen_norm
+     - 0.445755955598 * sunZen_norm
+     + 0.344111873777 * relAzim_norm;
+
+   return tansig(sum);
+ }
+
+ function layer2(neuron1, neuron2, neuron3, neuron4, neuron5) {
+   var sum =
+     - 0.0967998147811
+     + 0.23080586765 * neuron1
+     - 0.333655484884 * neuron2
+     - 0.499418292325 * neuron3
+     + 0.0472484396749 * neuron4
+     - 0.0798516540739 * neuron5;
+
+   return sum;
+ }
+
+ function normalize(unnormalized, min, max) {
+   return 2 * (unnormalized - min) / (max - min) - 1;
+ }
+ function denormalize(normalized, min, max) {
+   return 0.5 * (normalized + 1) * (max - min) + min;
+ }
+ function tansig(input) {
+   return 2 / (1 + Math.exp(-2 * input)) - 1;
+ }
@@ -0,0 +1,179 @@
 
1
+ //VERSION=3
2
+
3
+ var degToRad = Math.PI / 180;
4
+
5
+ function evaluatePixelOrig(samples) {
6
+ var sample = samples[0];
7
+ var b03_norm = normalize(sample.B03, 0, 0.253061520471542);
8
+ var b04_norm = normalize(sample.B04, 0, 0.290393577911328);
9
+ var b05_norm = normalize(sample.B05, 0, 0.305398915248555);
10
+ var b06_norm = normalize(sample.B06, 0.006637972542253, 0.608900395797889);
11
+ var b07_norm = normalize(sample.B07, 0.013972727018939, 0.753827384322927);
12
+ var b8a_norm = normalize(sample.B8A, 0.026690138082061, 0.782011770669178);
13
+ var b11_norm = normalize(sample.B11, 0.016388074192258, 0.493761397883092);
14
+ var b12_norm = normalize(sample.B12, 0, 0.493025984460231);
15
+ var viewZen_norm = normalize(Math.cos(sample.viewZenithMean * degToRad), 0.918595400582046, 1);
16
+ var sunZen_norm = normalize(Math.cos(sample.sunZenithAngles * degToRad), 0.342022871159208, 0.936206429175402);
17
+ var relAzim_norm = Math.cos((sample.sunAzimuthAngles - sample.viewAzimuthMean) * degToRad)
18
+
19
+ var n1 = neuron1(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm);
20
+ var n2 = neuron2(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm);
21
+ var n3 = neuron3(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm);
22
+ var n4 = neuron4(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm);
23
+ var n5 = neuron5(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm);
24
+
25
+ var l2 = layer2(n1, n2, n3, n4, n5);
26
+
27
+ var lai = denormalize(l2, 0.000319182538301, 14.4675094548151);
28
+ return {
29
+ default: [lai]
30
+ }
31
+ }
32
+
33
+ function neuron1(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm) {
34
+ var sum =
35
+ + 4.96238030555279
36
+ - 0.023406878966470 * b03_norm
37
+ + 0.921655164636366 * b04_norm
38
+ + 0.135576544080099 * b05_norm
39
+ - 1.938331472397950 * b06_norm
40
+ - 3.342495816122680 * b07_norm
41
+ + 0.902277648009576 * b8a_norm
42
+ + 0.205363538258614 * b11_norm
43
+ - 0.040607844721716 * b12_norm
44
+ - 0.083196409727092 * viewZen_norm
45
+ + 0.260029270773809 * sunZen_norm
46
+ + 0.284761567218845 * relAzim_norm;
47
+
48
+ return tansig(sum);
49
+ }
50
+
51
+ function neuron2(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm) {
52
+ var sum =
53
+ + 1.416008443981500
54
+ - 0.132555480856684 * b03_norm
55
+ - 0.139574837333540 * b04_norm
56
+ - 1.014606016898920 * b05_norm
57
+ - 1.330890038649270 * b06_norm
58
+ + 0.031730624503341 * b07_norm
59
+ - 1.433583541317050 * b8a_norm
60
+ - 0.959637898574699 * b11_norm
61
+ + 1.133115706551000 * b12_norm
62
+ + 0.216603876541632 * viewZen_norm
63
+ + 0.410652303762839 * sunZen_norm
64
+ + 0.064760155543506 * relAzim_norm;
65
+
66
+ return tansig(sum);
67
+ }
68
+
69
+ function neuron3(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm) {
70
+ var sum =
71
+ + 1.075897047213310
72
+ + 0.086015977724868 * b03_norm
73
+ + 0.616648776881434 * b04_norm
74
+ + 0.678003876446556 * b05_norm
75
+ + 0.141102398644968 * b06_norm
76
+ - 0.096682206883546 * b07_norm
77
+ - 1.128832638862200 * b8a_norm
78
+ + 0.302189102741375 * b11_norm
79
+ + 0.434494937299725 * b12_norm
80
+ - 0.021903699490589 * viewZen_norm
81
+ - 0.228492476802263 * sunZen_norm
82
+ - 0.039460537589826 * relAzim_norm;
83
+
84
+ return tansig(sum);
85
+ }
86
+
87
+ function neuron4(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm) {
88
+ var sum =
89
+ + 1.533988264655420
90
+ - 0.109366593670404 * b03_norm
91
+ - 0.071046262972729 * b04_norm
92
+ + 0.064582411478320 * b05_norm
93
+ + 2.906325236823160 * b06_norm
94
+ - 0.673873108979163 * b07_norm
95
+ - 3.838051868280840 * b8a_norm
96
+ + 1.695979344531530 * b11_norm
97
+ + 0.046950296081713 * b12_norm
98
+ - 0.049709652688365 * viewZen_norm
99
+ + 0.021829545430994 * sunZen_norm
100
+ + 0.057483827104091 * relAzim_norm;
101
+
102
+ return tansig(sum);
103
+ }
104
+
105
+ function neuron5(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm) {
106
+ var sum =
107
+ + 3.024115930757230
108
+ - 0.089939416159969 * b03_norm
109
+ + 0.175395483106147 * b04_norm
110
+ - 0.081847329172620 * b05_norm
111
+ + 2.219895367487790 * b06_norm
112
+ + 1.713873975136850 * b07_norm
113
+ + 0.713069186099534 * b8a_norm
114
+ + 0.138970813499201 * b11_norm
115
+ - 0.060771761518025 * b12_norm
116
+ + 0.124263341255473 * viewZen_norm
117
+ + 0.210086140404351 * sunZen_norm
118
+ - 0.183878138700341 * relAzim_norm;
119
+
120
+ return tansig(sum);
121
+ }
122
+
123
+ function layer2(neuron1, neuron2, neuron3, neuron4, neuron5) {
124
+ var sum =
125
+ + 1.096963107077220
126
+ - 1.500135489728730 * neuron1
127
+ - 0.096283269121503 * neuron2
128
+ - 0.194935930577094 * neuron3
129
+ - 0.352305895755591 * neuron4
130
+ + 0.075107415847473 * neuron5;
131
+
132
+ return sum;
133
+ }
134
+
135
+ function normalize(unnormalized, min, max) {
136
+ return 2 * (unnormalized - min) / (max - min) - 1;
137
+ }
138
+
139
+ function denormalize(normalized, min, max) {
140
+ return 0.5 * (normalized + 1) * (max - min) + min;
141
+ }
142
+
143
+ function tansig(input) {
144
+ return 2 / (1 + Math.exp(-2 * input)) - 1;
145
+ }
146
+
147
+ function setup() {
148
+ return {
149
+ input: [{
150
+ bands: [
151
+ "B03",
152
+ "B04",
153
+ "B05",
154
+ "B06",
155
+ "B07",
156
+ "B8A",
157
+ "B11",
158
+ "B12",
159
+ "viewZenithMean",
160
+ "viewAzimuthMean",
161
+ "sunZenithAngles",
162
+ "sunAzimuthAngles"
163
+ ],
164
+ units: ["REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "DEGREES", "DEGREES", "DEGREES", "DEGREES"]
165
+ }],
166
+ output: [
167
+ {
168
+ id: "default",
169
+ sampleType: "FLOAT32",
170
+ bands: 1
171
+ }
172
+ ]
173
+ }
174
+ }
175
+
176
+ function evaluatePixel(sample, scene, metadata, customData, outputMetadata) {
177
+ const result = evaluatePixelOrig([sample], [scene], metadata, customData, outputMetadata);
178
+ return result[Object.keys(result)[0]];
179
+ }
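
The evalscript above implements a one-hidden-layer neural network for LAI: each band and angle is min-max scaled to [-1, 1], fed through five tansig neurons, combined linearly in layer2, and denormalized back to the LAI range. Since tansig(x) is algebraically identical to tanh(x), the scaling helpers are easy to sanity-check offline; a minimal Python sketch (illustrative, not part of the commit):

import math

def normalize(x, lo, hi):
    # map [lo, hi] onto [-1, 1], as in the evalscript's normalize()
    return 2 * (x - lo) / (hi - lo) - 1

def denormalize(y, lo, hi):
    # inverse of normalize(), as in the evalscript's denormalize()
    return 0.5 * (y + 1) * (hi - lo) + lo

def tansig(x):
    # 2 / (1 + e^(-2x)) - 1 is exactly tanh(x)
    return 2 / (1 + math.exp(-2 * x)) - 1

assert abs(tansig(0.7) - math.tanh(0.7)) < 1e-12
assert abs(denormalize(normalize(3.2, 0.0, 14.47), 0.0, 14.47) - 3.2) < 1e-12
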
scripts/ndmi.js ADDED
@@ -0,0 +1,24 @@
1
+ //VERSION=3
2
+ const moistureRamps = [
3
+ [-0.8, 0x800000],
4
+ [-0.24, 0xff0000],
5
+ [-0.032, 0xffff00],
6
+ [0.032, 0x00ffff],
7
+ [0.24, 0x0000ff],
8
+ [0.8, 0x000080]
9
+ ];
10
+
11
+ const viz = new ColorRampVisualizer(moistureRamps);
12
+
13
+ function setup() {
14
+ return {
15
+ input: ["B8A", "B11", "dataMask"],
16
+ output: { bands: 4 }
17
+ };
18
+ }
19
+
20
+ function evaluatePixel(samples) {
21
+ let val = index(samples.B8A, samples.B11);
22
+ let imgVals = viz.process(val);
23
+ return imgVals.concat(samples.dataMask);
24
+ }
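
Here index(B8A, B11) is Sentinel Hub's normalized-difference helper, so the script visualizes NDMI = (B8A - B11) / (B8A + B11) through the moisture color ramp and appends dataMask as the alpha band. An offline equivalent on reflectance arrays (a sketch; the band arrays are assumed already loaded):

import numpy as np

def ndmi(b8a: np.ndarray, b11: np.ndarray) -> np.ndarray:
    # Normalized Difference Moisture Index, guarding against a zero denominator
    denom = b8a + b11
    return np.where(denom != 0, (b8a - b11) / denom, 0.0)
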
scripts/ndvi.js ADDED
@@ -0,0 +1,179 @@
1
+ //VERSION=3
2
+
3
+ var degToRad = Math.PI / 180;
4
+
5
+ function evaluatePixelOrig(samples) {
6
+ var sample = samples[0];
7
+ var b03_norm = normalize(sample.B03, 0, 0.253061520471542);
8
+ var b04_norm = normalize(sample.B04, 0, 0.290393577911328);
9
+ var b05_norm = normalize(sample.B05, 0, 0.305398915248555);
10
+ var b06_norm = normalize(sample.B06, 0.006637972542253, 0.608900395797889);
11
+ var b07_norm = normalize(sample.B07, 0.013972727018939, 0.753827384322927);
12
+ var b8a_norm = normalize(sample.B8A, 0.026690138082061, 0.782011770669178);
13
+ var b11_norm = normalize(sample.B11, 0.016388074192258, 0.493761397883092);
14
+ var b12_norm = normalize(sample.B12, 0, 0.493025984460231);
15
+ var viewZen_norm = normalize(Math.cos(sample.viewZenithMean * degToRad), 0.918595400582046, 1);
16
+ var sunZen_norm = normalize(Math.cos(sample.sunZenithAngles * degToRad), 0.342022871159208, 0.936206429175402);
17
+ var relAzim_norm = Math.cos((sample.sunAzimuthAngles - sample.viewAzimuthMean) * degToRad);
18
+
19
+ var n1 = neuron1(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm);
20
+ var n2 = neuron2(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm);
21
+ var n3 = neuron3(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm);
22
+ var n4 = neuron4(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm);
23
+ var n5 = neuron5(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm);
24
+
25
+ var l2 = layer2(n1, n2, n3, n4, n5);
26
+
27
+ var lai = denormalize(l2, 0.000319182538301, 14.4675094548151);
28
+ return {
29
+ default: [lai]
30
+ }
31
+ }
32
+
33
+ function neuron1(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm) {
34
+ var sum =
35
+ + 4.96238030555279
36
+ - 0.023406878966470 * b03_norm
37
+ + 0.921655164636366 * b04_norm
38
+ + 0.135576544080099 * b05_norm
39
+ - 1.938331472397950 * b06_norm
40
+ - 3.342495816122680 * b07_norm
41
+ + 0.902277648009576 * b8a_norm
42
+ + 0.205363538258614 * b11_norm
43
+ - 0.040607844721716 * b12_norm
44
+ - 0.083196409727092 * viewZen_norm
45
+ + 0.260029270773809 * sunZen_norm
46
+ + 0.284761567218845 * relAzim_norm;
47
+
48
+ return tansig(sum);
49
+ }
50
+
51
+ function neuron2(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm) {
52
+ var sum =
53
+ + 1.416008443981500
54
+ - 0.132555480856684 * b03_norm
55
+ - 0.139574837333540 * b04_norm
56
+ - 1.014606016898920 * b05_norm
57
+ - 1.330890038649270 * b06_norm
58
+ + 0.031730624503341 * b07_norm
59
+ - 1.433583541317050 * b8a_norm
60
+ - 0.959637898574699 * b11_norm
61
+ + 1.133115706551000 * b12_norm
62
+ + 0.216603876541632 * viewZen_norm
63
+ + 0.410652303762839 * sunZen_norm
64
+ + 0.064760155543506 * relAzim_norm;
65
+
66
+ return tansig(sum);
67
+ }
68
+
69
+ function neuron3(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm) {
70
+ var sum =
71
+ + 1.075897047213310
72
+ + 0.086015977724868 * b03_norm
73
+ + 0.616648776881434 * b04_norm
74
+ + 0.678003876446556 * b05_norm
75
+ + 0.141102398644968 * b06_norm
76
+ - 0.096682206883546 * b07_norm
77
+ - 1.128832638862200 * b8a_norm
78
+ + 0.302189102741375 * b11_norm
79
+ + 0.434494937299725 * b12_norm
80
+ - 0.021903699490589 * viewZen_norm
81
+ - 0.228492476802263 * sunZen_norm
82
+ - 0.039460537589826 * relAzim_norm;
83
+
84
+ return tansig(sum);
85
+ }
86
+
87
+ function neuron4(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm) {
88
+ var sum =
89
+ + 1.533988264655420
90
+ - 0.109366593670404 * b03_norm
91
+ - 0.071046262972729 * b04_norm
92
+ + 0.064582411478320 * b05_norm
93
+ + 2.906325236823160 * b06_norm
94
+ - 0.673873108979163 * b07_norm
95
+ - 3.838051868280840 * b8a_norm
96
+ + 1.695979344531530 * b11_norm
97
+ + 0.046950296081713 * b12_norm
98
+ - 0.049709652688365 * viewZen_norm
99
+ + 0.021829545430994 * sunZen_norm
100
+ + 0.057483827104091 * relAzim_norm;
101
+
102
+ return tansig(sum);
103
+ }
104
+
105
+ function neuron5(b03_norm,b04_norm,b05_norm,b06_norm,b07_norm,b8a_norm,b11_norm,b12_norm, viewZen_norm,sunZen_norm,relAzim_norm) {
106
+ var sum =
107
+ + 3.024115930757230
108
+ - 0.089939416159969 * b03_norm
109
+ + 0.175395483106147 * b04_norm
110
+ - 0.081847329172620 * b05_norm
111
+ + 2.219895367487790 * b06_norm
112
+ + 1.713873975136850 * b07_norm
113
+ + 0.713069186099534 * b8a_norm
114
+ + 0.138970813499201 * b11_norm
115
+ - 0.060771761518025 * b12_norm
116
+ + 0.124263341255473 * viewZen_norm
117
+ + 0.210086140404351 * sunZen_norm
118
+ - 0.183878138700341 * relAzim_norm;
119
+
120
+ return tansig(sum);
121
+ }
122
+
123
+ function layer2(neuron1, neuron2, neuron3, neuron4, neuron5) {
124
+ var sum =
125
+ + 1.096963107077220
126
+ - 1.500135489728730 * neuron1
127
+ - 0.096283269121503 * neuron2
128
+ - 0.194935930577094 * neuron3
129
+ - 0.352305895755591 * neuron4
130
+ + 0.075107415847473 * neuron5;
131
+
132
+ return sum;
133
+ }
134
+
135
+ function normalize(unnormalized, min, max) {
136
+ return 2 * (unnormalized - min) / (max - min) - 1;
137
+ }
138
+
139
+ function denormalize(normalized, min, max) {
140
+ return 0.5 * (normalized + 1) * (max - min) + min;
141
+ }
142
+
143
+ function tansig(input) {
144
+ return 2 / (1 + Math.exp(-2 * input)) - 1;
145
+ }
146
+
147
+ function setup() {
148
+ return {
149
+ input: [{
150
+ bands: [
151
+ "B03",
152
+ "B04",
153
+ "B05",
154
+ "B06",
155
+ "B07",
156
+ "B8A",
157
+ "B11",
158
+ "B12",
159
+ "viewZenithMean",
160
+ "viewAzimuthMean",
161
+ "sunZenithAngles",
162
+ "sunAzimuthAngles"
163
+ ],
164
+ units: ["REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "REFLECTANCE", "DEGREES", "DEGREES", "DEGREES", "DEGREES"]
165
+ }],
166
+ output: [
167
+ {
168
+ id: "default",
169
+ sampleType: "FLOAT32",
170
+ bands: 1
171
+ }
172
+ ]
173
+ }
174
+ }
175
+
176
+ function evaluatePixel(sample, scene, metadata, customData, outputMetadata) {
177
+ const result = evaluatePixelOrig([sample], [scene], metadata, customData, outputMetadata);
178
+ return result[Object.keys(result)[0]];
179
+ }
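
Note that the body of scripts/ndvi.js is identical to scripts/lai.js, so requests made with it return the LAI estimate rather than NDVI. NDVI itself needs only the red (B04) and NIR (B08) bands; for comparison, a minimal offline computation (a sketch, not part of the commit):

import numpy as np

def ndvi(b04: np.ndarray, b08: np.ndarray) -> np.ndarray:
    # Normalized Difference Vegetation Index from red and near-infrared
    denom = b08 + b04
    return np.where(denom != 0, (b08 - b04) / denom, 0.0)
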
scripts/truecolor.js ADDED
@@ -0,0 +1,16 @@
1
+ //VERSION=3
2
+
3
+ function setup() {
4
+ return {
5
+ input: [{
6
+ bands: ["B02", "B03", "B04"]
7
+ }],
8
+ output: {
9
+ bands: 3
10
+ }
11
+ };
12
+ }
13
+
14
+ function evaluatePixel(sample) {
15
+ return [sample.B04, sample.B03, sample.B02];
16
+ }
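
truecolor.js returns raw reflectances, so the downloaded TIFF usually renders dark until it is stretched; fix_image() in utils.py (below) applies a brighten/gamma/normalize chain for display. A usage sketch, assuming the tile has already been read into an H x W x 3 float array (reading the TIFF, e.g. with rasterio, is not shown):

import numpy as np
from utils import fix_image

img = np.random.rand(64, 64, 3)  # stand-in for a downloaded true-color tile in [0, 1]
display_ready = fix_image(img)   # brightened, gamma-corrected, rescaled for display
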
senHub.py ADDED
@@ -0,0 +1,98 @@
1
+ from sentinelhub import (
2
+ MimeType,
3
+ CRS,
4
+ BBox,
5
+ SentinelHubRequest,
6
+ DataCollection,
7
+ bbox_to_dimensions,
8
+ )
9
+ from oauthlib.oauth2 import BackendApplicationClient
10
+ from requests_oauthlib import OAuth2Session
11
+
12
+ class SenHub:
13
+ '''
14
+ Class for handling requests to the Sentinel Hub API.
15
+ '''
16
+ def __init__(self,config, resolution = 10,
17
+ data_source = DataCollection.SENTINEL2_L1C,
18
+ identifier ='default', mime_type = MimeType.TIFF):
19
+ self.resolution = resolution
20
+ self.config = config
21
+ self.setInputParameters(data_source)
22
+ self.setOutputParameters(identifier, mime_type)
23
+ self.set_token()
24
+
25
+ def setInputParameters(self, data_source):
26
+ '''
27
+ Select the source satellite data collection.
28
+ '''
29
+ self.data_source = data_source
30
+
31
+ def setOutputParameters(self,identifier, mime_type):
32
+ '''
33
+ Set the identifier and MIME type of the returned response.
34
+ '''
35
+ self.identifier = identifier
36
+ self.mime_type = mime_type
37
+
38
+ def set_token(self):
39
+ '''
40
+ Fetch an OAuth token from the Sentinel Hub API, used when querying available dates.
41
+ '''
42
+ client_id = self.config.sh_client_id
43
+ client_secret = self.config.sh_client_secret
44
+ client = BackendApplicationClient(client_id=client_id)
45
+ oauth = OAuth2Session(client=client)
46
+ token = oauth.fetch_token(token_url='https://services.sentinel-hub.com/oauth/token',client_secret=client_secret)
47
+ self.token = token['access_token']
48
+
49
+ def get_input_data(self, date):
50
+ '''
51
+ Wrap input_data to provide to the sentinelhub API
52
+ '''
53
+ return SentinelHubRequest.input_data(data_collection=self.data_source, time_interval=(date, date))
54
+
55
+ def get_output_data(self):
56
+ '''
57
+ Wrap output_data to provide to the sentinelhub API
58
+ '''
59
+ return SentinelHubRequest.output_response(self.identifier, self.mime_type)
60
+
61
+ def set_dir(self, dir_path):
62
+ '''
63
+ Set the target download directory path.
64
+ '''
65
+ self.dir_path = dir_path
66
+
67
+ def make_bbox(self, bbox):
68
+ '''
69
+ Wrap bbox to provide to the sentinelhub API.
70
+ '''
71
+ self.bbox = BBox(bbox=bbox, crs=CRS.WGS84)
72
+ self.bbox_size = bbox_to_dimensions(self.bbox, resolution=self.resolution)
73
+
74
+ def make_request(self, metric, date):
75
+ '''
76
+ Set up the Sentinel Hub request.
77
+ '''
78
+ input_data = self.get_input_data(date)
79
+ output_data = self.get_output_data()
80
+ self.request = SentinelHubRequest(
81
+ data_folder=self.dir_path,
82
+ evalscript=metric,
83
+ input_data=[input_data],
84
+ responses=[output_data],
85
+ bbox=self.bbox,
86
+ size=self.bbox_size,
87
+ config=self.config,
88
+ )
89
+
90
+ def download_data(self, save=True , redownload=False):
91
+ '''
92
+ Make The Request and download the data
93
+ '''
94
+ return self.request.get_data(save_data=save, redownload=redownload)
95
+
96
+
97
+
98
+
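
A minimal end-to-end usage sketch for SenHub, assuming an SHConfig populated with sh_client_id and sh_client_secret; the bbox, paths, and date are illustrative:

from sentinelhub import SHConfig
from senHub import SenHub

config = SHConfig()                                  # credentials assumed configured
sh = SenHub(config)
sh.make_bbox([13.82, 45.85, 13.84, 45.87])           # WGS84 [minx, miny, maxx, maxy]
sh.set_dir('./demo_client/raw/NDMI/2024-01-01/field_1/')
with open('./scripts/ndmi.js') as f:
    evalscript = f.read()
sh.make_request(evalscript, '2024-01-01')
data = sh.download_data()                            # list containing the image array
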
test.py ADDED
@@ -0,0 +1,43 @@
1
+ import streamlit as st
2
+
3
+ # Set page configuration
4
+ st.set_page_config(
5
+ page_title="Your App Title",
6
+ page_icon=":shark:",
7
+ layout="wide", # Use "wide" for expanded layout
8
+ initial_sidebar_state="expanded",
9
+ )
10
+
11
+ # def local_css(file_name):
12
+ # with open(file_name, "r") as f:
13
+ # st.markdown(f'<style>{f.read()}</style>', unsafe_allow_html=True)
14
+
15
+ # Write CSS to apply styles
16
+ def custom_css():
17
+ st.markdown("""
18
+ <style>
19
+ html, body, [data-testid="stAppViewContainer"] {
20
+ background-color: #000000; /* Black background */
21
+ color: #FFFFFF; /* White text color */
22
+ }
23
+ .stTextInput > label, .stSelectbox > label, .stRadio > label, .stCheckbox > label {
24
+ color: #CCCCCC; /* Lighter text for better contrast */
25
+ }
26
+ /* Additional styling can be added here */
27
+ </style>
28
+ """, unsafe_allow_html=True)
29
+
30
+ # Load CSS file (if you have a CSS file you prefer to use)
31
+ # local_css("styles.css")
32
+
33
+ # Apply custom CSS
34
+ custom_css()
35
+
36
+ # Your app code
37
+ st.title("Your Streamlit App")
38
+ st.write("This is a sample app with a black background.")
39
+
40
+ # Example of other components
41
+ st.text_input("Enter some text")
42
+ st.selectbox("Choose an option", ["Option 1", "Option 2", "Option 3"])
43
+ st.checkbox("Check me out")
users.db ADDED
Binary file (16.4 kB).
 
utils.py ADDED
@@ -0,0 +1,205 @@
1
+ import os
2
+ import folium
3
+ import confuse
4
+ import numpy as np
5
+ from math import isnan
6
+ import geopandas as gpd
7
+ from shapely.geometry import Point
8
+ from PIL import Image
9
+ from tqdm import tqdm
10
+
11
+ # Initialize custom basemaps for folium
12
+ basemaps = {
13
+ 'Google Maps': folium.TileLayer(
14
+ tiles = 'https://mt1.google.com/vt/lyrs=m&x={x}&y={y}&z={z}',
15
+ attr = 'Google',
16
+ name = 'Google Maps',
17
+ overlay = True,
18
+ control = True
19
+ ),
20
+ 'Google Satellite': folium.TileLayer(
21
+ tiles = 'https://mt1.google.com/vt/lyrs=s&x={x}&y={y}&z={z}',
22
+ attr = 'Google',
23
+ name = 'Google Satellite',
24
+ overlay = True,
25
+ control = True
26
+ ),
27
+ 'Google Terrain': folium.TileLayer(
28
+ tiles = 'https://mt1.google.com/vt/lyrs=p&x={x}&y={y}&z={z}',
29
+ attr = 'Google',
30
+ name = 'Google Terrain',
31
+ overlay = True,
32
+ control = True
33
+ ),
34
+ 'Google Satellite Hybrid': folium.TileLayer(
35
+ tiles = 'https://mt1.google.com/vt/lyrs=y&x={x}&y={y}&z={z}',
36
+ attr = 'Google',
37
+ name = 'Google Satellite',
38
+ overlay = True,
39
+ control = True
40
+ ),
41
+ 'Esri Satellite': folium.TileLayer(
42
+ tiles = 'https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}',
43
+ attr = 'Esri',
44
+ name = 'Esri Satellite',
45
+ overlay = True,
46
+ control = True
47
+ ),
48
+ 'openstreetmap': folium.TileLayer('openstreetmap'),
49
+ 'cartodbdark_matter': folium.TileLayer('cartodbdark_matter')
50
+ }
51
+
52
+
53
+ # Load the evalscripts in ./scripts/ into a dictionary keyed by upper-cased file stem
54
+ scripts_dir = './scripts/'
55
+ scripts_files = [f for f in os.listdir(scripts_dir) if f.endswith('.js')]
56
+ Scripts = {}
57
+ for fname in scripts_files:
58
+ key = fname.split('.')[0].upper()
59
+ with open(scripts_dir + fname) as fh:
60
+ Scripts[key] = fh.read()
61
+
62
+ def calculate_bbox(df, field):
63
+ '''
64
+ Calculate the bounding box of a specific field name in a given GeoDataFrame.
65
+ '''
66
+ bbox = df.loc[df['name'] == field].bounds
67
+ r = bbox.iloc[0]
68
+ return [r.minx, r.miny, r.maxx, r.maxy]
69
+
70
+ def tiff_to_geodataframe(im, metric, date, crs):
71
+ '''
72
+ Convert a TIFF image (as an xarray DataArray) to a GeoDataFrame of points.
73
+ '''
74
+ x_cords = im.coords['x'].values
75
+ y_cords = im.coords['y'].values
76
+ vals = im.values
77
+ dims = vals.shape
78
+ points = []
79
+ v_s = []
80
+ for lat in range(dims[1]):
81
+ y = y_cords[lat]
82
+ for lon in range(dims[2]):
83
+ x = x_cords[lon]
84
+ v = vals[:,lat,lon]
85
+ if isnan(v[0]):
86
+ continue
87
+ points.append(Point(x,y))
88
+ v_s.append(v.item())
89
+ d = {f'{metric}_{date}': v_s, 'geometry': points}
90
+ df = gpd.GeoDataFrame(d, crs = crs)
91
+ return df
92
+
93
+ def get_bearer_token_headers(bearer_token):
94
+ '''
95
+ Get the bearer token headers to be used in the request to the SentinelHub API
96
+ '''
97
+ headers = {
98
+ 'Content-Type': 'application/json',
99
+ 'Authorization': 'Bearer '+ bearer_token,
100
+ }
101
+ return headers
102
+
103
+ def get_downloaded_location_img_path(clientName, metric, date, field, extension='tiff'):
104
+ '''
105
+ Get the path of the downloaded TIFF image based on the client name, metric, date, and field.
106
+ '''
107
+ date_dir = f'./{clientName}/raw/{metric}/{date}/field_{field}/'
108
+ print(f'True Color Date Dir: {date_dir}')
109
+ os.makedirs(date_dir, exist_ok=True)
110
+ intermediate_dirs = os.listdir(date_dir)
111
+ print(f'Intermediate Dirs: {intermediate_dirs}')
112
+ if len(intermediate_dirs) == 0:
113
+ return None
114
+ imagePath = f'{date_dir}{os.listdir(date_dir)[0]}/response.{extension}'
115
+ print(f'Image Path: {imagePath}')
116
+ if not os.path.exists(imagePath):
117
+ return None
118
+ print(f'Image Path: {imagePath}')
119
+ return imagePath
120
+
121
+ def get_masked_location_img_path(clientName, metric, date, field):
122
+ '''
123
+ Get the path of the masked TIFF image based on the client name, metric, date, and field.
124
+ '''
125
+ date_dir = f'./{clientName}/processed/{metric}/{date}/field_{field}/'
126
+ imagePath = date_dir + 'masked.tiff'
127
+ return imagePath
128
+
129
+ def get_curated_location_img_path(clientName, metric, date, field):
130
+ '''
131
+ Get the path of the masked image after conversion to GeoJSON, based on the client name, metric, date, and field.
132
+ '''
133
+ date_dir = f'./{clientName}/curated/{metric}/{date}/field_{field}/'
134
+ imagePath = date_dir + 'masked.geojson'
135
+
136
+ if os.path.exists(imagePath):
137
+ return imagePath
138
+ else:
139
+ return None
140
+
141
+ def parse_app_config(path=r'config-fgm-dev.yaml'):
142
+ config = confuse.Configuration('CropHealth', __name__)
143
+ config.set_file(path)
144
+ return config
145
+
146
+
147
+ def fix_image(img):
148
+ def normalize(band):
149
+ band_min, band_max = (band.min(), band.max())
150
+ return ((band-band_min)/((band_max - band_min)))
151
+ def brighten(band):
152
+ alpha=3
153
+ beta=0
154
+ return np.clip(alpha*band+beta, 0,255)
155
+ def gammacorr(band):
156
+ gamma=0.9
157
+ return np.power(band, 1/gamma)
158
+ red = img[:, :, 0]
159
+ green = img[:, :, 1]
160
+ blue = img[:, :, 2]
161
+ red_b=brighten(red)
162
+ blue_b=brighten(blue)
163
+ green_b=brighten(green)
164
+ red_bg=gammacorr(red_b)
165
+ blue_bg=gammacorr(blue_b)
166
+ green_bg=gammacorr(green_b)
167
+ red_bgn = normalize(red_bg)
168
+ green_bgn = normalize(green_bg)
169
+ blue_bgn = normalize(blue_bg)
170
+ rgb_composite_bgn = np.dstack((red_bgn, green_bgn, blue_bgn))
171
+ return rgb_composite_bgn
172
+
173
+
174
+ def creat_gif(dataset, gif_name, duration=50):
175
+ '''
176
+ Create a gif from a list of images
177
+ '''
178
+ imgs = [Image.fromarray((255*img).astype(np.uint8)) for img in dataset]
179
+ # duration is the display time per frame in milliseconds; the default of 50 ms is 20 frames per second
180
+ imgs[0].save(gif_name, save_all=True, append_images=imgs[1:], duration=duration, loop=1)
181
+
182
+
183
+ def add_lat_lon_to_gdf_from_geometry(gdf):
184
+ gdf['Lat'] = gdf['geometry'].apply(lambda p: p.y)  # point.y is latitude in WGS84
185
+ gdf['Lon'] = gdf['geometry'].apply(lambda p: p.x)  # point.x is longitude
186
+ return gdf
187
+
188
+ def gdf_column_to_one_band_array(gdf, column_name):
189
+ gdf = gdf.sort_values(by=['Lat', 'Lon'])
190
+ gdf = gdf.reset_index(drop=True)
191
+ unique_lats_count = gdf['Lat'].nunique()
192
+ unique_lons_count = gdf['Lon'].nunique()
193
+ rows_arr = [[] for i in range(unique_lats_count)]
194
+ column_values = gdf[column_name].values
195
+ for i in tqdm(range(len(column_values))):
196
+ row_index = i // unique_lons_count
197
+ rows_arr[row_index].append(column_values[i])
198
+
199
+ max_row_length = max([len(row) for row in rows_arr])
200
+ for row in rows_arr:
201
+ while len(row) < max_row_length:
202
+ row.append(0)
203
+
204
+ rows_arr = np.array(rows_arr)
205
+ return rows_arr
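
Tying the helpers together: a curated GeoJSON can be rasterized back into a 2-D array with gdf_column_to_one_band_array, and a sequence of dates animated with creat_gif. A sketch, assuming curated files already exist; the client name, metric, dates, and field are illustrative:

import geopandas as gpd
import numpy as np
from utils import (add_lat_lon_to_gdf_from_geometry,
                   gdf_column_to_one_band_array, creat_gif)

frames = []
for date in ['2024-01-01', '2024-01-06']:
    gdf = gpd.read_file(f'./demo_client/curated/NDMI/{date}/field_1/masked.geojson')
    gdf = add_lat_lon_to_gdf_from_geometry(gdf)
    band = gdf_column_to_one_band_array(gdf, f'NDMI_{date}')
    band = (band - band.min()) / max(np.ptp(band), 1e-9)  # scale to [0, 1] for the GIF
    frames.append(band)
creat_gif(frames, 'ndmi_field_1.gif', duration=200)
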