Skip to content

Commit

Permalink
Merge pull request #3 from CIAT-DAPA/develop
Browse files Browse the repository at this point in the history
Test workflow
  • Loading branch information
Minotriz02 authored May 27, 2024
2 parents 580eef8 + fbf2a98 commit fd4d43a
Show file tree
Hide file tree
Showing 19 changed files with 531 additions and 7 deletions.
134 changes: 134 additions & 0 deletions .github/workflows/pipeline.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python

name: Devops AClimate API

on:
  push:
    branches: [ "stage" ]
    tags:
      - 'v*'

permissions:
  contents: read

jobs:

  # ------- START TEST PROCESS -------- #

  TestModules:

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.9
        uses: actions/setup-python@v3
        with:
          python-version: "3.9"
      # NOTE: every `run:` step starts a fresh shell, so activating the venv
      # in a separate step has no effect on later steps. Create/activate and
      # use the venv inside the same step instead.
      - name: Install dependencies
        run: |
          python -m venv env
          source env/bin/activate
          pip install -r ./requirements.txt
      - name: Run Tests
        run: |
          source env/bin/activate
          python -m unittest discover -s ./src/test/ -p 'test_*.py'
  # ------- END TEST PROCESS -------- #

  # ------- START MERGE PROCESS -------- #

  MergeMainModules:
    needs: [TestModules]
    name: Merge Stage with Main
    permissions: write-all
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@master

      - name: Merge stage -> main
        uses: devmasx/merge-branch@master
        with:
          type: now
          head_to_merge: ${{ github.ref }}
          target_branch: main
          github_token: ${{ github.token }}

  # ------- END MERGE PROCESS -------- #

  # ------- START RELEASE PROCESS -------- #

  PostRelease:
    needs: MergeMainModules
    name: Create Release
    runs-on: ubuntu-latest
    permissions: write-all
    steps:
      - uses: actions/checkout@v3
        with:
          # Full history so the tag action below can see existing tags.
          fetch-depth: '0'
      # API Zip
      - name: Zip artifact for deployment
        run: zip releaseModules.zip ./src/* -r
      # Upload Artifacts
      - name: Upload API artifact for deployment job
        uses: actions/upload-artifact@v3
        with:
          name: API
          path: releaseModules.zip
      # Generate Tagname
      - name: Generate Tagname for release
        id: taggerDryRun
        uses: anothrNick/github-tag-action@1.61.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          WITH_V: true
          DRY_RUN: true
          DEFAULT_BUMP: patch
          RELEASE_BRANCHES: stage,main
          BRANCH_HISTORY: last
      # Create release
      # NOTE(review): actions/create-release and actions/upload-release-asset
      # are archived/unmaintained; consider softprops/action-gh-release when
      # next touching this workflow.
      - name: Create Release
        id: create_release
        uses: actions/create-release@v1
        env:
          GITHUB_TOKEN: ${{ github.token }}
        with:
          tag_name: ${{ steps.taggerDryRun.outputs.new_tag }}
          release_name: Release ${{ steps.taggerDryRun.outputs.new_tag }}
          #body_path: ./body.md
          body: ${{ github.event.head_commit.message }}
          draft: false
          prerelease: false
      # Upload Assets to release
      - name: Upload Release Asset Modules
        id: upload-modules-release-asset
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          # Pulled from the Create Release step's outputs object via its id.
          upload_url: ${{ steps.create_release.outputs.upload_url }}
          asset_path: ./releaseModules.zip
          asset_name: releaseModules.zip
          asset_content_type: application/zip
      # update version in setup.py on main
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          ref: main
      - name: Update version
        # Match the version value whether single- or double-quoted (the
        # committed setup.py uses double quotes), then rewrite it with the
        # tag generated above.
        run: |
          sed -i "s/version=['\"][^'\"]*['\"]/version='${{ steps.taggerDryRun.outputs.new_tag }}'/" setup.py
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Commit changes
        uses: stefanzweifel/git-auto-commit-action@v4
        with:
          commit_message: "Update version to ${{ steps.taggerDryRun.outputs.new_tag }}"

  # ------- END RELEASE PROCESS -------- #
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -103,3 +103,5 @@ bower_components/

# dotenv
.env

geo_config.txt
14 changes: 14 additions & 0 deletions setup.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
from setuptools import setup, find_packages

# Packaging metadata for the AClimate Python API client (src layout).
# NOTE: the release workflow updates the version with
#   sed -i "s/version='.*'/version='<new_tag>'/" setup.py
# which only matches a SINGLE-quoted value — keep the version single-quoted.
setup(
    name="aclimate_api",
    version='0.1',  # single quotes required by the release pipeline's sed
    author="Minotriz02",
    author_email="sebastian.lopez@cgiar.org",
    description="API for the AClimate project",
    url="https://github.com/CIAT-DAPA/aclimatepyapi",
    download_url="https://github.com/CIAT-DAPA/aclimatepyapi",
    packages=find_packages('src'),  # discover packages under src/
    package_dir={'': 'src'},        # map the root package dir to src/
    keywords=["aclimate", "api", "climate", "agriculture"],
)
114 changes: 114 additions & 0 deletions src/aclimate_api/geoserver.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
import requests
import pandas as pd
import rasterio
import json
import re
import tempfile
import geopandas as gpd
import os

class Geoserver:
    """Thin client for a GeoServer instance's REST and OWS endpoints.

    REST credentials are read from ``geo_config.txt`` located two directories
    above this module (repo root), containing ``GEOSERVER_USER=...`` and
    ``GEOSERVER_PASSWORD=...`` lines — TODO confirm the expected location
    against deployment docs.
    """

    def __init__(self, url_root):
        # Base URL of the GeoServer, e.g. "https://geo.aclimate.org/geoserver/".
        self.url_root = url_root

    def _base_url(self):
        """Return url_root without a trailing slash so joins never emit '//'."""
        return self.url_root.rstrip('/')

    @staticmethod
    def _load_credentials():
        """Read GEOSERVER_USER / GEOSERVER_PASSWORD from geo_config.txt.

        Returns:
            (user, password) tuple suitable for requests' ``auth=``.

        Raises:
            FileNotFoundError: if geo_config.txt is missing.
            KeyError: if either expected key is absent from the file.
        """
        current_dir = os.path.dirname(os.path.abspath(__file__))
        config_file = os.path.join(current_dir, '../../geo_config.txt')
        with open(config_file, "r") as f:
            # split on the FIRST '=' only, so passwords containing '=' survive;
            # skip blank lines.
            credentials = dict(
                line.strip().split('=', 1) for line in f if line.strip()
            )
        return credentials['GEOSERVER_USER'], credentials['GEOSERVER_PASSWORD']

    def get_geo_workspaces(self):
        """Return a DataFrame of workspaces (workspace_name, workspace_href).

        Returns an empty DataFrame when the request fails or the payload has
        no workspaces, matching the behavior of the sibling listing methods.
        """
        url = f"{self._base_url()}/rest/workspaces.json"
        response = requests.get(url, auth=self._load_credentials())
        if response.status_code == 200:
            data = response.json()
            if 'workspaces' in data and 'workspace' in data['workspaces']:
                workspaces_list = data['workspaces']['workspace']
                return pd.DataFrame(
                    [{'workspace_name': ws['name'], 'workspace_href': ws['href']}
                     for ws in workspaces_list]
                )
        return pd.DataFrame()

    def get_geo_mosaic_name(self, workspace):
        """Return a DataFrame of coverage stores (mosaic_name, mosaic_href)
        in *workspace*, or an empty DataFrame on failure/absence."""
        url = f"{self._base_url()}/rest/workspaces/{workspace}/coveragestores.json"
        response = requests.get(url, auth=self._load_credentials())
        if response.status_code == 200:
            data = response.json()
            if 'coverageStores' in data and 'coverageStore' in data['coverageStores']:
                mosaics_list = data['coverageStores']['coverageStore']
                return pd.DataFrame(
                    [{'mosaic_name': ms['name'], 'mosaic_href': ms['href']}
                     for ms in mosaics_list]
                )
        return pd.DataFrame()

    def get_geo_mosaics(self, workspace, mosaic_name, year, month=1, day=1):
        """Download one time slice of a mosaic coverage via WCS as GeoTIFF.

        Returns the raster bands as a numpy array (``rasterio`` ``read()``),
        or None after printing the server's exception message on failure.
        """
        url = (
            f"{self._base_url()}/{workspace}/ows?service=WCS&request=GetCoverage"
            f"&version=2.0.1&coverageId={mosaic_name}&format=image/geotiff"
            f"&subset=Time(\"{year}-{month:02d}-{day:02d}T00:00:00.000Z\")"
        )
        response = requests.get(url)

        if response.status_code == 200:
            # NamedTemporaryFile instead of the deprecated, race-prone
            # tempfile.mktemp; delete=False so rasterio can reopen it by name.
            with tempfile.NamedTemporaryFile(suffix=".tif", delete=False) as tmp:
                tmp.write(response.content)
                temp_tiff = tmp.name
            try:
                # Context manager closes the dataset; read() copies the bands
                # into memory, so closing afterwards is safe.
                with rasterio.open(temp_tiff) as raster_data:
                    return raster_data.read()
            finally:
                os.remove(temp_tiff)  # don't leak the temp file
        else:
            match_result = re.findall("<ows:ExceptionText>(.*?)</ows:ExceptionText>", response.text)
            if match_result:
                exception_text = match_result[0]
                print(f"Error making the request. Status code: {response.status_code}\nMsg: {exception_text}")
            else:
                print(f"Error making the request. Status code: {response.status_code}")
            return None

    def get_geo_polygon_name(self, workspace):
        """Return a DataFrame of data stores (polygon_name, polygon_href)
        in *workspace*, or an empty DataFrame on failure/absence."""
        url = f"{self._base_url()}/rest/workspaces/{workspace}/datastores.json"
        response = requests.get(url, auth=self._load_credentials())
        if response.status_code == 200:
            data = response.json()
            if 'dataStores' in data and 'dataStore' in data['dataStores']:
                polygons_list = data['dataStores']['dataStore']
                return pd.DataFrame(
                    [{'polygon_name': pg['name'], 'polygon_href': pg['href']}
                     for pg in polygons_list]
                )
        return pd.DataFrame()

    def get_geo_polygons(self, workspace, polygon_name):
        """Fetch a vector layer via WFS as GeoJSON.

        Returns a GeoDataFrame, or None after printing the server's exception
        message on failure.
        """
        url = (
            f"{self._base_url()}/{workspace}/ows?service=WFS&request=GetFeature"
            f"&version=2.0.1&typeNames={workspace}:{polygon_name}"
            f"&outputFormat=application/json"
        )
        response = requests.get(url)

        if response.status_code == 200:
            return gpd.read_file(response.text)
        else:
            exception_text = re.search(r'<ows:ExceptionText>(.*?)</ows:ExceptionText>', response.text)
            if exception_text:
                print(f"Error making the request. Status code: {response.status_code}\nMsg: {exception_text.group(1)}")
            else:
                print(f"Error making the request. Status code: {response.status_code}")
            return None
3 changes: 3 additions & 0 deletions src/aclimate_api/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from forecast import Forecast
from agronomy import Agronomy
from historical import Historical
from geoserver import Geoserver

def main():
g = Geographic("https://webapi.aclimate.org/api/")
Expand All @@ -12,6 +13,8 @@ def main():
print(a.get_agronomy())
h = Historical("https://webapi.aclimate.org/api/")
print(h.get_historical_climatology(["5a7e422057d7f316c8bc574e"]))
gs = Geoserver("https://geo.aclimate.org/geoserver/")
print(gs.get_geo_workspaces())

if __name__ == "__main__":
main()
Binary file added src/requirements.txt
Binary file not shown.
7 changes: 0 additions & 7 deletions src/setup.py

This file was deleted.

File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
Loading

0 comments on commit fd4d43a

Please sign in to comment.