diff --git a/.github/workflows/pipeline.yml b/.github/workflows/pipeline.yml
new file mode 100644
index 0000000..1668e19
--- /dev/null
+++ b/.github/workflows/pipeline.yml
@@ -0,0 +1,134 @@
+# This workflow will install Python dependencies, run tests and lint with a single version of Python
+# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
+
+name: Devops AClimate API
+
+on:
+ push:
+ branches: [ "stage" ]
+ tags:
+ - 'v*'
+
+
+permissions:
+ contents: read
+
+jobs:
+
+# ------- START TEST PROCESS -------- #
+
+ TestModules:
+
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v3
+ with:
+ python-version: "3.9"
+ - name: Create environment
+ run: |
+ python -m venv env
+    - name: Activate environment
+ run: |
+ source env/bin/activate
+ - name: Install dependencies
+ run: |
+        pip install -r ./src/requirements.txt
+ - name: Run Tests
+ run: |
+ python -m unittest discover -s ./src/test/ -p 'test_*.py'
+# ------- END TEST PROCESS -------- #
+
+# ------- START MERGE PROCESS -------- #
+
+ MergeMainModules:
+ needs: [TestModules]
+ name: Merge Stage with Main
+ permissions: write-all
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@master
+
+ - name: Merge stage -> main
+ uses: devmasx/merge-branch@master
+ with:
+ type: now
+ head_to_merge: ${{ github.ref }}
+ target_branch: main
+ github_token: ${{ github.token }}
+
+# ------- END MERGE PROCESS -------- #
+
+# ------- START RELEASE PROCESS -------- #
+
+ PostRelease:
+ needs: MergeMainModules
+ name: Create Release
+ runs-on: ubuntu-latest
+ permissions: write-all
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: '0'
+ # API Zip
+ - name: Zip artifact for deployment
+ run: zip releaseModules.zip ./src/* -r
+ # Upload Artifacts
+ - name: Upload API artifact for deployment job
+ uses: actions/upload-artifact@v3
+ with:
+ name: API
+ path: releaseModules.zip
+ # Generate Tagname
+ - name: Generate Tagname for release
+ id: taggerDryRun
+ uses: anothrNick/github-tag-action@1.61.0
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ WITH_V: true
+ DRY_RUN: true
+ DEFAULT_BUMP: patch
+        RELEASE_BRANCHES: stage,main
+ BRANCH_HISTORY: last
+ # Create release
+ - name: Create Release
+ id: create_release
+ uses: actions/create-release@v1
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ with:
+ tag_name: ${{ steps.taggerDryRun.outputs.new_tag }}
+ release_name: Release ${{ steps.taggerDryRun.outputs.new_tag }}
+ #body_path: ./body.md
+ body: ${{ github.event.head_commit.message }}
+ draft: false
+ prerelease: false
+ # Upload Assets to release
+ - name: Upload Release Asset Modules
+ id: upload-modules-release-asset
+ uses: actions/upload-release-asset@v1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ upload_url: ${{ steps.create_release.outputs.upload_url }} # This pulls from the CREATE RELEASE step above, referencing it's ID to get its outputs object, which include a `upload_url`. See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
+ asset_path: ./releaseModules.zip
+ asset_name: releaseModules.zip
+ asset_content_type: application/zip
+ # update version setup.py
+ - name: Checkout code
+ uses: actions/checkout@v3
+ with:
+ ref: main
+ - name: Update version
+ run: |
+          sed -i "s/version=\".*\"/version=\"${{ steps.taggerDryRun.outputs.new_tag }}\"/" setup.py
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ - name: Commit changes
+ uses: stefanzweifel/git-auto-commit-action@v4
+ with:
+ commit_message: "Update version to ${{ steps.taggerDryRun.outputs.new_tag }}"
+
+# ------- END RELEASE PROCESS -------- #
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 9b24280..05f1422 100644
--- a/.gitignore
+++ b/.gitignore
@@ -103,3 +103,5 @@ bower_components/
# dotenv
.env
+
+geo_config.txt
\ No newline at end of file
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..ea578ca
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,14 @@
+from setuptools import setup, find_packages
+
+setup(
+ name="aclimate_api",
+ version="0.1",
+ author="Minotriz02",
+ author_email="sebastian.lopez@cgiar.org",
+ description="API for the AClimate project",
+ url="https://github.com/CIAT-DAPA/aclimatepyapi",
+ download_url="https://github.com/CIAT-DAPA/aclimatepyapi",
+ packages=find_packages('src'),
+ package_dir={'': 'src'},
+ keywords=["aclimate", "api", "climate", "agriculture"],
+)
\ No newline at end of file
diff --git a/src/aclimate_api/geoserver.py b/src/aclimate_api/geoserver.py
new file mode 100644
index 0000000..c1f4947
--- /dev/null
+++ b/src/aclimate_api/geoserver.py
@@ -0,0 +1,114 @@
+import requests
+import pandas as pd
+import rasterio
+import json
+import re
+import tempfile
+import geopandas as gpd
+import os
+
+class Geoserver:
+
+ def __init__(self, url_root):
+ self.url_root = url_root
+
+ def get_geo_workspaces(self):
+ current_dir = os.path.dirname(os.path.abspath(__file__))
+ config_file = os.path.join(current_dir, '../../geo_config.txt')
+
+ with open(config_file, "r") as f:
+ credentials = dict(line.strip().split('=') for line in f.readlines())
+
+ url = f"{self.url_root}/rest/workspaces.json"
+
+ response = requests.get(url, auth=(credentials['GEOSERVER_USER'], credentials['GEOSERVER_PASSWORD']))
+
+ data = json.loads(response.text)
+ workspaces_list = data['workspaces']['workspace']
+ workspace_df = pd.DataFrame([{'workspace_name': ws['name'], 'workspace_href': ws['href']} for ws in workspaces_list])
+ return workspace_df
+
+ # print(get_geo_workspaces("https://geo.aclimate.org/geoserver/"))
+
+ def get_geo_mosaic_name(self, workspace):
+ current_dir = os.path.dirname(os.path.abspath(__file__))
+ config_file = os.path.join(current_dir, '../../geo_config.txt')
+
+ with open(config_file, "r") as f:
+ credentials = dict(line.strip().split('=') for line in f.readlines())
+
+ url = f"{self.url_root}/rest/workspaces/{workspace}/coveragestores.json"
+
+ response = requests.get(url, auth=(credentials['GEOSERVER_USER'], credentials['GEOSERVER_PASSWORD']))
+
+ if response.status_code == 200:
+ data = json.loads(response.text)
+ if 'coverageStores' in data and 'coverageStore' in data['coverageStores']:
+ mosaics_list = data['coverageStores']['coverageStore']
+ mosaics_df = pd.DataFrame([{'mosaic_name': ms['name'], 'mosaic_href': ms['href']} for ms in mosaics_list])
+ return mosaics_df
+ return pd.DataFrame()
+
+ # print(get_geo_mosaic_name("https://geo.aclimate.org/geoserver/", "waterpoints_et"))
+
+ def get_geo_mosaics(self, workspace, mosaic_name, year, month=1, day=1):
+ url = f"{self.url_root}{workspace}/ows?service=WCS&request=GetCoverage&version=2.0.1&coverageId={mosaic_name}&format=image/geotiff&subset=Time(\"{year}-{month:02d}-{day:02d}T00:00:00.000Z\")"
+ response = requests.get(url)
+
+ if response.status_code == 200:
+ # Create a temporary file
+            temp_tiff = tempfile.NamedTemporaryFile(suffix=".tif", delete=False).name
+ with open(temp_tiff, 'wb') as f:
+ f.write(response.content)
+
+ # Load the raster data
+ raster_data = rasterio.open(temp_tiff)
+ return raster_data.read()
+
+ else:
+            match_result = re.findall(r"<ows:ExceptionText>(.*?)</ows:ExceptionText>", response.text)
+ if match_result:
+ exception_text = match_result[0]
+ print(f"Error making the request. Status code: {response.status_code}\nMsg: {exception_text}")
+ else:
+ print(f"Error making the request. Status code: {response.status_code}")
+ return None
+
+ # print(get_geo_mosaics("https://geo.aclimate.org/geoserver/", "waterpoints_et", "biomass", 2024, 4, 22))
+
+ def get_geo_polygon_name(self, workspace):
+ current_dir = os.path.dirname(os.path.abspath(__file__))
+ config_file = os.path.join(current_dir, '../../geo_config.txt')
+
+ with open(config_file, "r") as f:
+ credentials = dict(line.strip().split('=') for line in f.readlines())
+
+ url = f"{self.url_root}rest/workspaces/{workspace}/datastores.json"
+ response = requests.get(url, auth=(credentials['GEOSERVER_USER'], credentials['GEOSERVER_PASSWORD']))
+
+ if response.status_code == 200:
+ data = json.loads(response.text)
+ if 'dataStores' in data and 'dataStore' in data['dataStores']:
+ polygons_list = data['dataStores']['dataStore']
+ polygons_df = pd.DataFrame([{'polygon_name': pg['name'], 'polygon_href': pg['href']} for pg in polygons_list])
+ return polygons_df
+ return pd.DataFrame()
+
+ # print(get_geo_polygon_name("https://geo.aclimate.org/geoserver/", "fc_cenaos_hn"))
+
+ def get_geo_polygons(self, workspace, polygon_name):
+        url = f"{self.url_root}{workspace}/ows?service=WFS&request=GetFeature&version=2.0.1&typeNames={workspace}:{polygon_name}&outputFormat=application/json"
+ response = requests.get(url)
+
+ if response.status_code == 200:
+ sf_obj_geoserver = gpd.read_file(response.text)
+ return sf_obj_geoserver
+ else:
+            exception_text = re.search(r'<ows:ExceptionText>(.*?)</ows:ExceptionText>', response.text)
+ if exception_text:
+ print(f"Error making the request. Status code: {response.status_code}\nMsg: {exception_text.group(1)}")
+ else:
+ print(f"Error making the request. Status code: {response.status_code}")
+ return None
+
+ # print(get_geo_polygons("https://geo.aclimate.org/geoserver/", "fc_cenaos_hn", "admin_levels"))
\ No newline at end of file
diff --git a/src/aclimate_api/main.py b/src/aclimate_api/main.py
index 45e4549..bad7462 100644
--- a/src/aclimate_api/main.py
+++ b/src/aclimate_api/main.py
@@ -2,6 +2,7 @@
from forecast import Forecast
from agronomy import Agronomy
from historical import Historical
+from geoserver import Geoserver
def main():
g = Geographic("https://webapi.aclimate.org/api/")
@@ -12,6 +13,8 @@ def main():
print(a.get_agronomy())
h = Historical("https://webapi.aclimate.org/api/")
print(h.get_historical_climatology(["5a7e422057d7f316c8bc574e"]))
+ gs = Geoserver("https://geo.aclimate.org/geoserver/")
+ print(gs.get_geo_workspaces())
if __name__ == "__main__":
main()
\ No newline at end of file
diff --git a/src/requirements.txt b/src/requirements.txt
new file mode 100644
index 0000000..f628ade
Binary files /dev/null and b/src/requirements.txt differ
diff --git a/src/setup.py b/src/setup.py
deleted file mode 100644
index 6709a34..0000000
--- a/src/setup.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from setuptools import setup, find_packages
-
-setup(
- name="aclimate_api",
- version="0.1",
- packages=find_packages(),
-)
\ No newline at end of file
diff --git a/test/__init__.py b/src/test/__init__.py
similarity index 100%
rename from test/__init__.py
rename to src/test/__init__.py
diff --git a/test/mock/__init__.py b/src/test/mock/__init__.py
similarity index 100%
rename from test/mock/__init__.py
rename to src/test/mock/__init__.py
diff --git a/test/mock/mock_agronomy.py b/src/test/mock/mock_agronomy.py
similarity index 100%
rename from test/mock/mock_agronomy.py
rename to src/test/mock/mock_agronomy.py
diff --git a/test/mock/mock_forecast.py b/src/test/mock/mock_forecast.py
similarity index 100%
rename from test/mock/mock_forecast.py
rename to src/test/mock/mock_forecast.py
diff --git a/test/mock/mock_geographic.py b/src/test/mock/mock_geographic.py
similarity index 100%
rename from test/mock/mock_geographic.py
rename to src/test/mock/mock_geographic.py
diff --git a/src/test/mock/mock_geoserver.py b/src/test/mock/mock_geoserver.py
new file mode 100644
index 0000000..bc5746f
--- /dev/null
+++ b/src/test/mock/mock_geoserver.py
@@ -0,0 +1,167 @@
+geo_workspace_mock_data = '''
+{
+ "workspaces": {
+ "workspace": [
+ {
+ "name": "climate_extreme_indices_et",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_extreme_indices_et.json"
+ },
+ {
+ "name": "fertilizer_et",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/fertilizer_et.json"
+ },
+ {
+ "name": "aclimate_et",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/aclimate_et.json"
+ },
+ {
+ "name": "administrative",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/administrative.json"
+ },
+ {
+ "name": "aclimate_gt",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/aclimate_gt.json"
+ },
+ {
+ "name": "climate_indices_pe",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe.json"
+ },
+ {
+ "name": "waterpoints_et",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/waterpoints_et.json"
+ },
+ {
+ "name": "agroclimate_indices_ao",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/agroclimate_indices_ao.json"
+ }
+ ]
+ }
+}
+'''
+geo_mosaic_name_mock_data = '''
+{
+ "coverageStores": {
+ "coverageStore": [
+ {
+ "name": "freq_rh80_t_20_25",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/freq_rh80_t_20_25.json"
+ },
+ {
+ "name": "freq_wb0_t30",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/freq_wb0_t30.json"
+ },
+ {
+ "name": "r10_mm",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/r10_mm.json"
+ },
+ {
+ "name": "r1_mm",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/r1_mm.json"
+ },
+ {
+ "name": "r1mm_consec",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/r1mm_consec.json"
+ },
+ {
+ "name": "r1mm_periods",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/r1mm_periods.json"
+ },
+ {
+ "name": "rh_80",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/rh_80.json"
+ },
+ {
+ "name": "t15_consec",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/t15_consec.json"
+ },
+ {
+ "name": "t15_periods",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/t15_periods.json"
+ },
+ {
+ "name": "t30_consec",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/t30_consec.json"
+ },
+ {
+ "name": "t30_periods",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/t30_periods.json"
+ },
+ {
+ "name": "t_15",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/t_15.json"
+ },
+ {
+ "name": "t_30",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/t_30.json"
+ },
+ {
+ "name": "total_rainfall",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/total_rainfall.json"
+ },
+ {
+ "name": "wb_0",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/climate_indices_pe/coveragestores/wb_0.json"
+ }
+ ]
+ }
+}
+'''
+
+geo_polygon_name_mock_data = '''
+{
+ "dataStores": {
+ "dataStore": [
+ {
+ "name": "ao_adm1",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/administrative/datastores/ao_adm1.json"
+ },
+ {
+ "name": "ao_adm2",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/administrative/datastores/ao_adm2.json"
+ },
+ {
+ "name": "et_adm1",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/administrative/datastores/et_adm1.json"
+ },
+ {
+ "name": "et_adm2",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/administrative/datastores/et_adm2.json"
+ },
+ {
+ "name": "et_adm3",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/administrative/datastores/et_adm3.json"
+ },
+ {
+ "name": "et_adm4",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/administrative/datastores/et_adm4.json"
+ },
+ {
+ "name": "pe_adm1",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/administrative/datastores/pe_adm1.json"
+ },
+ {
+ "name": "pe_adm2",
+ "href": "https://geo.aclimate.org/geoserver/rest/workspaces/administrative/datastores/pe_adm2.json"
+ }
+ ]
+ }
+}
+'''
+
+geo_polygon_mock_data = '''
+{
+ "type": "FeatureCollection",
+ "features": [
+ {
+ "type": "Feature",
+ "geometry": {
+ "type": "Polygon",
+ "coordinates": [[[30, 10], [40, 40], [20, 40], [10, 20], [30, 10]]]
+ },
+ "properties": {
+ "name": "polygon1"
+ }
+ }
+ ]
+}
+'''
\ No newline at end of file
diff --git a/test/mock/mock_historical.py b/src/test/mock/mock_historical.py
similarity index 100%
rename from test/mock/mock_historical.py
rename to src/test/mock/mock_historical.py
diff --git a/test/test_agronomy.py b/src/test/test_agronomy.py
similarity index 100%
rename from test/test_agronomy.py
rename to src/test/test_agronomy.py
diff --git a/test/test_forecast.py b/src/test/test_forecast.py
similarity index 100%
rename from test/test_forecast.py
rename to src/test/test_forecast.py
diff --git a/test/test_geographic.py b/src/test/test_geographic.py
similarity index 100%
rename from test/test_geographic.py
rename to src/test/test_geographic.py
diff --git a/src/test/test_geoserver.py b/src/test/test_geoserver.py
new file mode 100644
index 0000000..fe1d51a
--- /dev/null
+++ b/src/test/test_geoserver.py
@@ -0,0 +1,97 @@
+import unittest
+from unittest.mock import patch
+import pandas as pd
+import numpy as np
+import sys
+import os
+from io import BytesIO
+from rasterio.io import MemoryFile
+import geopandas as gpd
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+from src.aclimate_api.geoserver import Geoserver
+from test.mock.mock_geoserver import geo_workspace_mock_data, geo_mosaic_name_mock_data, geo_polygon_name_mock_data, geo_polygon_mock_data
+
+url_root = "https://webapi.aclimate.org/api/"
+
+class TestGeoserver(unittest.TestCase):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.geoserver = Geoserver(url_root)
+
+ @patch('requests.get')
+ def test_get_geo_workspaces(self, mock_get):
+ mock_get.return_value.status_code = 200
+ mock_get.return_value.text = geo_workspace_mock_data
+
+ result = self.geoserver.get_geo_workspaces()
+
+ self.assertIsInstance(result, pd.DataFrame)
+ self.assertIn("workspace_name", result.columns)
+ self.assertIn("workspace_href", result.columns)
+ self.assertEqual(result.shape[1], 2)
+ self.assertEqual(result.shape[0], 8)
+
+ @patch('requests.get')
+ def test_get_geo_mosaic_name(self, mock_get):
+ workspace = "waterpoints_et"
+ mock_get.return_value.status_code = 200
+ mock_get.return_value.text = geo_mosaic_name_mock_data
+
+ result = self.geoserver.get_geo_mosaic_name(workspace)
+
+ self.assertIsInstance(result, pd.DataFrame)
+ self.assertIn("mosaic_name", result.columns)
+ self.assertIn("mosaic_href", result.columns)
+ self.assertEqual(result.shape[1], 2)
+ self.assertEqual(result.shape[0], 15)
+
+ @patch('requests.get')
+ def test_get_geo_mosaics(self, mock_get):
+ workspace = "waterpoints_et"
+ mosaic_name = "biomass"
+ year = 2024
+ month = 4
+ day = 22
+ # Create a mock response content (a simple in-memory GeoTIFF)
+ mock_tiff = BytesIO()
+ with MemoryFile() as memfile:
+ with memfile.open(driver='GTiff', height=1, width=1, count=1, dtype=np.uint8) as dst:
+ dst.write(np.array([[1]], dtype=np.uint8), 1)
+ mock_tiff.write(memfile.read())
+ mock_tiff.seek(0)
+ mock_get.return_value.status_code = 200
+ mock_get.return_value.content = mock_tiff.read()
+
+ result = self.geoserver.get_geo_mosaics(workspace, mosaic_name, year, month, day)
+
+ self.assertIsInstance(result, np.ndarray)
+
+ @patch('requests.get')
+ def test_get_geo_polygon_name(self, mock_get):
+ workspace = "fc_cenaos_hn"
+ mock_get.return_value.status_code = 200
+ mock_get.return_value.text = geo_polygon_name_mock_data
+
+ result = self.geoserver.get_geo_polygon_name(workspace)
+
+ self.assertIsInstance(result, pd.DataFrame)
+ self.assertIn("polygon_name", result.columns)
+ self.assertIn("polygon_href", result.columns)
+ self.assertEqual(result.shape[1], 2)
+ self.assertEqual(result.shape[0], 8)
+
+ @patch('requests.get')
+ def test_get_geo_polygons(self, mock_get):
+ workspace = "administrative"
+ polygon_name = "ao_adm1"
+ mock_get.return_value.status_code = 200
+ mock_get.return_value.text = geo_polygon_mock_data
+
+ result = self.geoserver.get_geo_polygons(workspace, polygon_name)
+
+ self.assertIsInstance(result, gpd.GeoDataFrame)
+
+if __name__ == '__main__':
+ unittest.main()
\ No newline at end of file
diff --git a/test/test_historical.py b/src/test/test_historical.py
similarity index 100%
rename from test/test_historical.py
rename to src/test/test_historical.py