Title: | OHA/SI APIs Package |
---|---|
Description: | Provides a series of base functions useful to the GH OHA SI team. These functions extend the utility functions in glamr, focusing primarily on API utility functions. |
Authors: | Aaron Chafetz [aut, cre], Tim Essam [aut], Baboyma Kagniniwa [aut] |
Maintainer: | Aaron Chafetz <[email protected]> |
License: | MIT + file LICENSE |
Version: | 2.1.2 |
Built: | 2024-11-22 06:24:25 UTC |
Source: | https://github.com/USAID-OHA-SI/grabr |
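The entries below cover DATIM, Panorama, PDAP Wave, and S3 helpers. As an orientation, a minimal workflow sketch follows; it assumes DATIM credentials have already been stored with glamr (see 'set_datim()' under get_outable) and the parameter values are illustrative only.
## Not run:
library(grabr)
library(glamr)

# load stored DATIM credentials into the session
load_secrets()

# pull the PEPFAR OU/country reference table
ou_table <- get_outable(datim_user(), datim_pwd())

# query results for a single technical area (values illustrative)
df_tx <- datim_query(ou = "Nigeria", ta = "TX_CURR",
                     username = datim_user(), password = datim_pwd())
## End(Not run)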
Get dimension / item id
datim_dim_item( dimension, name, username, password, baseurl = "https://final.datim.org/" )
dimension | Dimension name
name | Item name
username | DATIM Account Username
password | DATIM Account password
baseurl | DATIM API endpoint
UID of item
## Not run:
library(grabr)

datim_dim_item(dimension = "Funding Agency", name = "USAID")
datim_dim_item(dimension = "Targets / Results", name = "MER Results")
datim_dim_item(dimension = "Targets / Results", name = "MER Targets")
## End(Not run)
Get PEPFAR/DATIM Dimension Items
datim_dim_items( dimension, username, password, var = NULL, fields = NULL, baseurl = "https://final.datim.org/" )
dimension | Dimension name
username | DATIM Account Username
password | DATIM Account password
var | Column name to pull values from: id or item
fields | List of column names to return; this will overwrite 'var'
baseurl | DATIM API endpoint
Dimension's items as tibble or vector
## Not run:
library(grabr)

datim_dim_items(dimension = "Funding Agency")
datim_dim_items(dimension = "Funding Agency", var = "item")
## End(Not run)
Build PEPFAR/DATIM Query dimension
datim_dim_url( dimension, items = NULL, username, password, baseurl = "https://final.datim.org/" )
dimension | Dimension name
items | Item name(s)
username | DATIM Account Username
password | DATIM Account password
baseurl | DATIM API endpoint
Valid DATIM Query Params url
## Not run:
library(grabr)

datim_dim_url(dimension = "Sex")

datim_dim_url(
  dimension = "Disaggregation Type",
  items = "Age/Sex/HIVStatus"
)

datim_dim_url(
  dimension = "Disaggregation Type",
  items = c("Age/Sex", "Age/Sex/HIVStatus")
)
## End(Not run)
Get PEPFAR/DATIM Dimension ID
datim_dimension(name, username, password, baseurl = "https://final.datim.org/")
name | Dimension name
username | DATIM Account Username, recommend using 'glamr::datim_user()'
password | DATIM Account password, recommend using 'glamr::datim_pwd()'
baseurl | DATIM API endpoint, default is 'https://final.datim.org/'
dimension uid
## Not run: library(grabr) datim_dimension("OU Level") ## End(Not run)
Get PEPFAR/DATIM dimensions
datim_dimensions( username, password, var = NULL, baseurl = "https://final.datim.org" )
username | DATIM Account Username
password | DATIM Account password
var | Column name to pull all values from; default is NULL, options are: id, dimension
baseurl | DATIM API endpoint, default is 'https://final.datim.org'
Dimensions as tibble or list of ids / dimension names
## Not run: library(grabr) datim_dimensions() ## End(Not run)
Execute Datim Query
datim_execute_query(url, username, password, flatten = FALSE)
url | API base url & all query parameters
username | Datim username, recommend using 'glamr::datim_user()'
password | Datim password, recommend using 'glamr::datim_pwd()'
flatten | Should the query's json result be flattened? Default is FALSE
returns query results as json object, or NULL when error occurs.
## Not run:
library(grabr)

datim_execute_query(
  url = "https://www.datim.org/api/sqlViews/<uid>?format=json",
  username = glamr::datim_user(),
  password = glamr::datim_pwd(),
  flatten = TRUE
)
## End(Not run)
Extract Mechanism Info from Datim
datim_mechs(cntry, username, password, agency = "USAID", baseurl = NULL)
cntry | Country name
username | Datim username
password | Datim password
agency | Agency name
baseurl | Datim API base URL
OU Mechanisms as data frame
## Not run:
library(grabr)

datim_mechs(
  cntry = "Mozambique",
  username = glamr::datim_user(),
  password = glamr::datim_pwd()
)
## End(Not run)
Pull Orgunits SQLView
datim_orgunits( cntry, username, password, reshape = FALSE, baseurl = "https://final.datim.org/" )
cntry | Country name
username | Datim username
password | Datim password
reshape | Unpack parent orgunits? Default is FALSE
baseurl | Datim API base URL, defaults to https://final.datim.org/
OU/Country Orgunits as a data frame
## Not run:
library(grabr)

datim_orgunits(
  cntry = "Mozambique",
  username = glamr::datim_user(),
  password = glamr::datim_pwd()
)
## End(Not run)
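The reshape argument unpacks parent orgunits into separate columns. A minimal sketch reusing the documented arguments:
## Not run:
library(grabr)

# unpack parent orgunits into their own columns
df_orgs <- datim_orgunits(
  cntry = "Mozambique",
  username = glamr::datim_user(),
  password = glamr::datim_pwd(),
  reshape = TRUE
)
## End(Not run)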
Extract PLHIV and General POP Estimates from datim
datim_pops( ou, username, password, level = "country", fy = NULL, hierarchy = FALSE, baseurl = "https://final.datim.org/" )
ou | Operating unit
username | Datim account username
password | Datim account password
level | Organization level
fy | Fiscal year
hierarchy | Should additional organizational hierarchy be added? Default is FALSE
baseurl | DATIM API base url
PLHIV and POP_EST Data
## Not run:
library(grabr)

datim_pops(ou = "Nigeria")
datim_pops(ou = "Nigeria", fy = 2021)
datim_pops(ou = "Nigeria", level = "psnu", fy = 2021, hierarchy = TRUE)
## End(Not run)
Process Datim Query results
datim_process_query(url, username, password)
url | Datim API call url
username | Datim username, recommend using 'glamr::datim_user()'
password | Datim password, recommend using 'glamr::datim_pwd()'
Data as tibble or NULL when error occurs.
## Not run: library(grabr) datim_process_query("<full-api-call-url>") ## End(Not run)
Extract PEPFAR Org Hierarchy
datim_pull_hierarchy( ou_uid, username, password, add_geom = FALSE, baseurl = "https://final.datim.org/", folderpath_output = NULL )
ou_uid | UID for the country, recommend using 'identify_ouuids()'
username | DATIM username, recommend using 'datim_user()'
password | DATIM password, recommend using 'datim_pwd()'
add_geom | Add geometry column to the output, default is FALSE
baseurl | API base url, default = https://final.datim.org/
folderpath_output | Provide the full path to the folder for saving
This function was migrated from Wavelength and is similar to 'datim_orgunits()'
## Not run:
# get OU UID
ouuid <- identify_ouuids() %>%
  dplyr::filter(country == "Kenya")

# pull hierarchy (paths are all UIDs)
df <- datim_pull_hierarchy(ouuid, username = myuser, password = mypwd(myuser))
## End(Not run)
Query PEPFAR/DATIM targets/results data
datim_query( ou, username, password, level = "prioritization", pe = "THIS_FINANCIAL_YEAR", ta = "PLHIV", value = NULL, disaggs = NULL, dimensions = NULL, property = "SHORTNAME", metadata = TRUE, hierarchy = TRUE, baseurl = "https://final.datim.org/", verbose = FALSE )
ou | Operating unit
username | Datim username
password | Datim password
level | Organization hierarchy level
pe | Reporting period. This can be expressed as relative or fixed periods, e.g. "THIS_FINANCIAL_YEAR", "2020Oct", "QUARTERS_THIS_YEAR", "2021Q2"; default is "THIS_FINANCIAL_YEAR"
ta | Technical area; valid options can be obtained from 'datim_dim_items("Technical Area")'
value | Type of value to return: MER Targets, Results, or both
disaggs | Disaggregation types. This depends on the value of ta
dimensions | Additional dimensions and/or columns. This depends on the values of ta and disaggs
property | Type of name
metadata | Should metadata be included?
hierarchy | Should additional hierarchy levels be included?
baseurl | DATIM API endpoint url
verbose | Display all notifications
data as tibble
## Not run:
library(grabr)

datim_query(ou = "Nigeria", ta = "PLHIV")
datim_query(ou = "Nigeria", ta = "POP_EST")

datim_query(ou = "Nigeria",
            level = "country",
            ta = "TX_CURR",
            disaggs = "Age/Sex/HIVStatus",
            dimensions = c("Age: <15/15+ (Coarse)", "Sex"))
## End(Not run)
Query Datim SQLViews
datim_sqlviews( username, password, view_name = NULL, dataset = FALSE, datauid = NULL, query = NULL, baseurl = NULL )
username | Datim username
password | Datim password
view_name | Datim SQLView name
dataset | Return the SQLView dataset instead of the uid? Default is FALSE
datauid | Data UID
query | SQLView query params, a list containing type and params key/value pairs
baseurl | Datim API base URL
SQLView uid or dataset as data frame
This function should be used to identify Datim SQLViews and extract their data
## Not run:
library(grabr)

datim_sqlviews(
  username = glamr::datim_user(),
  password = glamr::datim_pwd(),
  view_name = "A list of OUs",
  dataset = TRUE
)
## End(Not run)
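The query argument expects a list of type and params key/value pairs. The sketch below is illustrative only: the "variable" type, the view name, and the "OU" parameter name are assumptions that depend on the SQLView being queried.
## Not run:
library(grabr)

# filter a variable-based SQLView; view name and the "OU" parameter
# name are placeholders and will vary by view
datim_sqlviews(
  username = glamr::datim_user(),
  password = glamr::datim_pwd(),
  view_name = "Data Exchange: Organisation Units",
  dataset = TRUE,
  query = list(type = "variable", params = list("OU" = "MOZ"))
)
## End(Not run)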
Generate an API URL
gen_url( ou_uid, org_lvl, org_type = "facility", value_type = "results", is_hts = FALSE, fy_pd = NULL, baseurl = "https://final.datim.org/" )
ou_uid | UID for the country, recommend using 'identify_ouuids()'
org_lvl | Org hierarchy level, e.g. facility is level 7 in country X; recommend using 'identify_levels()'
org_type | Organization type, either facility (default) or community
value_type | results (default) or targets
is_hts | Is the API for HTS indicators (HTS_TST or HTS_TST_POS)? Default = FALSE
fy_pd | Fiscal year(s) to cover; defaults to the current FY if not provided
baseurl | API base url, default = https://final.datim.org/
## Not run:
# get OU UID
ouuid <- identify_ouuids() %>%
  dplyr::filter(ou == "Ghana")

# get facility level
faclvl <- identify_levels(username = myuser, password = mypwd()) %>%
  dplyr::filter(ou == "Ghana")

# gen url
myurl <- gen_url(ouuid, faclvl, org_type = "facility")
## End(Not run)
Get base url from a link
get_baseurl(url)
url | DATIM API endpoint / full API call url
Base url without trailing slash
Other utility:
var_exists()
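No example ships with this helper; a minimal illustration of the documented behavior (dropping the path and trailing slash):
## Not run:
library(grabr)

# expected to drop the path and trailing slash, e.g. "https://final.datim.org"
get_baseurl("https://final.datim.org/api/organisationUnits")
## End(Not run)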
DATIM API Call for Targets
get_datim_data(url, username, password)
url | Supply url for API call, recommend using 'gen_url()'
username | DATIM username
password | DATIM password, recommend using 'mypwd()'
## Not run:
myurl <- paste0(baseurl,
                "api/29/analytics.json?",
                "dimension=LxhLO68FcXm:udCop657yzi&",
                "dimension=ou:LEVEL-4;HfVjCurKxh2&",
                "filter=pe:2018Oct&",
                "displayProperty=SHORTNAME&outputIdScheme=CODE")

myuser <- "UserX"

df_datim <- get_datim_data(myurl, myuser, mypwd(myuser))
## End(Not run)
DATIM API Call for Targets
get_datim_targets(url, username, password)
url | Supply url for API call, recommend using 'gen_url()'
username | DATIM username
password | DATIM password, recommend using 'mypwd()'
## Not run:
myurl <- paste0(baseurl,
                "api/29/analytics.json?",
                "dimension=LxhLO68FcXm:udCop657yzi&",
                "dimension=ou:LEVEL-4;HfVjCurKxh2&",
                "filter=pe:2018Oct&",
                "displayProperty=SHORTNAME&outputIdScheme=CODE")

myuser <- "UserX"

df_targets <- get_datim_targets(myurl, myuser, mypwd(myuser))
## End(Not run)
Get all orgunits levels in org hierarchy
get_levels( username, password, expand = FALSE, reshape = FALSE, baseurl = "https://final.datim.org/" )
username | DATIM username, recommend using 'glamr::datim_user()'
password | DATIM password, recommend using 'glamr::datim_pwd()'
expand | Fill in missing snu1 level? Default is FALSE
reshape | Reshape data as long? Default is FALSE
baseurl | Base API url, default = https://final.datim.org/
df
Similar to 'grabr::identify_levels()' and 'grabr::get_outable()'
## Not run:
library(grabr)

# Get PEPFAR org levels
get_levels()
## End(Not run)
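A minimal sketch of the expand and reshape options, using the documented arguments:
## Not run:
library(grabr)

# fill in missing snu1 levels and reshape the table long
df_lvls <- get_levels(
  username = glamr::datim_user(),
  password = glamr::datim_pwd(),
  expand = TRUE,
  reshape = TRUE
)
## End(Not run)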
Get Org UIDS
get_orguids( level = 3, username, password, baseurl = "https://final.datim.org/" )
level | Org level
username | DATIM username, recommend using 'glamr::datim_user()'
password | DATIM password, recommend using 'glamr::datim_pwd()'
baseurl | Base url for the API, default = https://final.datim.org/
ORG UIDS as tibble
Use with caution. Use 'get_ouorguids()' for levels below 3
## Not run:
library(grabr)

# All orgunit level 3 uids + names
orgs <- get_orguids(level = 3)
## End(Not run)
Identify OU/Org Label
get_ouorglabel( operatingunit, country = NULL, org_level = 4, username, password, baseurl = "https://final.datim.org/" )
operatingunit | Operating unit
country | Country name
org_level | OU org level, default is set to 4 (PSNU)
username | Datim account username
password | Datim account password
baseurl | Datim base url
Org level label
## Not run: library(grabr) get_ouorglabel(operatingunit = "Zambia", org_level = 5) ## End(Not run)
Get OU Org level
get_ouorglevel( operatingunit, country = NULL, org_type = "prioritization", username, password, baseurl = "https://final.datim.org/" )
operatingunit | Operating unit name
country | Country name (default = operatingunit)
org_type | Orgunit type (country_lvl, prioritization, community, facility_lvl)
username | Datim account username
password | Datim account password
baseurl | Datim base URL
Org level
## Not run:
library(grabr)

cntry <- "Zambia"

# Get country org level
get_ouorglevel(cntry)

# Get community org level
get_ouorglevel(cntry, org_type = "community")
## End(Not run)
Get list of OU Orgs at specific level
get_ouorgs( ouuid, level = 4, username, password, baseurl = "https://final.datim.org/" )
ouuid | OU uid
level | Org level
username | DATIM username
password | DATIM password, recommend using 'mypwd()'
baseurl | Base url for the API, default = https://final.datim.org/
ORG UIDS as tibble
Use 'get_orguids()' for levels above 4
## Not run:
library(grabr)

cntry <- "Zambia"
uid <- get_ouuid(cntry)
lvl <- get_ouorglevel(cntry, org_type = "prioritization")

orgs <- get_ouorgs(ouuid = uid, level = lvl)
## End(Not run)
Get Orgs uids by level
get_ouorguids( ouuid, level, username, password, baseurl = "https://final.datim.org/" )
ouuid | Operating unit uid
level | Orgunit level
username | Datim account username
password | Datim account password
baseurl | Datim base url
list of uids
## Not run:
library(grabr)

# Set country of interest
cntry <- "Zambia"

# Get OU/Country orgunit uid
uid <- get_ouuid(cntry)

# Get org level for psnu
lvl <- get_ouorglevel(cntry, org_type = "prioritization")

# Retrieve all uids for level 4 (SNU1)
get_ouorguids(ouuid = uid, level = 4)
## End(Not run)
'get_outable' pulls from DATIM to return a dataframe with all PEPFAR Operating Units and countries along with useful information for merging, eg ISO codes, and use in DATIM APIs, eg UIDs and hierarchy levels.
get_outable(username, password, baseurl = "https://final.datim.org/")
username | DATIM username, defaults to 'glamr::datim_user()' if blank
password | DATIM password, defaults to 'glamr::datim_pwd()' if blank
baseurl | Base url for the API, default = https://final.datim.org/
'get_outable' is a wrapper around 'identify_ouuids()' and 'identify_levels()' that pulls this information directly from DATIM. The user will need to have a DATIM account to access this data. You can take advantage of storing your credentials locally in a secure way using 'set_datim()'.
data frame with all PEPFAR OUs, countries, their UIDs, ISO codes and different levels in the DATIM hierarchy
[set_datim()] to store DATIM authentication; [load_secrets()] to load credentials into session
## Not run:
load_secrets()

ou_table <- get_outable()
## End(Not run)
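Building on the details above, a sketch of reusing the OU table in later API calls. The country and country_uid column names are assumptions based on 'glamr::pepfar_country_list' and may differ from the actual output.
## Not run:
library(grabr)
library(glamr)
library(dplyr)

load_secrets()

ou_table <- get_outable(datim_user(), datim_pwd())

# assumed column names (country, country_uid); adjust to the actual table
ken_uid <- ou_table %>%
  filter(country == "Kenya") %>%
  pull(country_uid)

df_ken <- datim_pull_hierarchy(ken_uid,
                               username = datim_user(),
                               password = datim_pwd())
## End(Not run)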
Get Operatingunit / Country Org UID
get_ouuid( operatingunit, username, password, baseurl = "https://final.datim.org/" )
operatingunit | Operating unit name
username | Datim account username, recommend using 'glamr::datim_user()'
password | Datim account password, recommend using 'glamr::datim_pwd()'
baseurl | Base url for the API, default = https://final.datim.org/
uid
## Not run:
library(grabr)

# get orgunit uid for a specific OU/Country: Kenya
get_ouuid(operatingunit = "Kenya")
## End(Not run)
Get OU Org UIDS
get_ouuids( add_details = FALSE, username, password, baseurl = "https://final.datim.org/" )
add_details | Add countries for regional OUs? Default is FALSE
username | DATIM username, recommend using 'glamr::datim_user()'
password | DATIM password, recommend using 'glamr::datim_pwd()'
baseurl | Base url for the API, default = https://final.datim.org/
OU UIDS as tibble
## Not run:
library(grabr)

# OU org UIDs
ous <- get_ouuids()
## End(Not run)
'identify_levels' pulls from DATIM to return a dataframe with all PEPFAR Operating Units and countries with their ISO codes and hierarchy levels. This is one of two components that feeds into 'get_outable'.
identify_levels(username, password, baseurl = "https://final.datim.org/")
username | DATIM username, defaults to 'glamr::datim_user()' if blank
password | DATIM password, defaults to 'glamr::datim_pwd()' if blank
baseurl | Base url for the API, default = https://final.datim.org/
To access this data, the user will need to have a DATIM account. You can take advantage of storing your credentials locally in a secure way using 'set_datim()'.
[set_datim()] to store DATIM authentication; [load_secrets()] to load credentials into session
## Not run:
# table for all OUs
load_secrets()

identify_levels()
## End(Not run)
'identify_ouuids' pulls from DATIM to return a dataframe with all PEPFAR Operating Units and countries and their UIDs. This is one of two components that feeds into 'get_outable'.
identify_ouuids(username, password, baseurl = "https://final.datim.org/")
username | DATIM username, defaults to 'glamr::datim_user()' if blank
password | DATIM password, defaults to 'glamr::datim_pwd()' if blank
baseurl | Base url for the API, default = https://final.datim.org/
To access the UIDs, the user will need to have a DATIM account. You can take advantage of storing your credentials locally in a secure way using 'set_datim()'.
Datim country names
[set_datim()] to store DATIM authentication; [load_secrets()] to load credentials into session
## Not run:
load_secrets()

ous <- identify_ouuids()
## End(Not run)
This function is useful within another function. It checks whether a username or password has been provided by the user and, if not, checks whether they were stored via 'glamr' or asks the user to provide credentials through an interactive prompt.
lazy_secrets(service = c("datim", "pano", "pdap", "s3"), username, password)
service | Account type, either "datim", "pano", "pdap", or "s3"
username | Account username or S3 access key
password | Account password or S3 secret key
returns a list of 2 - username/access and password/secret
## Not run:
accnt <- lazy_secrets("datim", username = username, password = password)

datim_dimensions(accnt$username, accnt$password)
## End(Not run)
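As the description notes, the helper is meant to sit inside other functions. A sketch of that pattern; the wrapper function below is hypothetical:
## Not run:
# hypothetical wrapper showing the intended within-function pattern
my_dimensions <- function(username, password) {

  # resolve credentials from the supplied args or stored glamr secrets
  accnt <- lazy_secrets("datim", username = username, password = password)

  datim_dimensions(username = accnt$username, password = accnt$password)
}
## End(Not run)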
Download file from PEPFAR Panorama
pano_download( item_url, username, password, session = NULL, dest_path = NULL, uncompress = FALSE )
item_url | URL of the item to be downloaded
username | Username for PEPFAR Panorama account, recommend using 'pano_user()'
password | Password for PEPFAR Panorama account, recommend using 'pano_pwd()'
session | Login session, only used within other 'pano_extract_*()' functions
dest_path | Location and name of the destination file
uncompress | If TRUE, the downloaded zip file will be decompressed. Default is FALSE
file content as binary
## Not run:
library(tidyverse)
library(grabr)
library(glamr)

url <- "https://pepfar-panorama.org/forms/downloads"

elts <- pano_items(page_url = url,
                   username = pano_user(),
                   password = pano_pwd())

f_url <- elts %>%
  filter(type == "file zipfile") %>%
  pull(path) %>%
  first()

pano_download(item_url = f_url,
              username = pano_user(),
              password = pano_pwd(),
              dest_path = "Data")
## End(Not run)
Extract data outputs from Panorama
pano_extract( item = "mer", version, fiscal_year, quarter, unpack = FALSE, username, password, session = NULL, baseurl = "https://pepfar-panorama.org" )
item | Panorama data type, e.g. mer, financial, sims, narratives
version | Data release version: initial or clean, defaults to the current version if blank
fiscal_year | Reporting fiscal year, defaults to the current FY if blank
quarter | Reporting quarter, defaults to the current quarter if blank
unpack | If TRUE, unpack nested directories
username | Panorama username, recommend using 'glamr::pano_user()'
password | Panorama password, recommend using 'glamr::pano_pwd()'
session | Login session, only used within other 'pano_extract_*()' functions
baseurl | Panorama base url
list of output files as data frame
This function combines 'pano_session()', 'pano_content()', 'pano_elements()', and in some cases 'pano_unpack()'
## Not run:
library(tidyverse)
library(grabr)

pano_extract(item = "mer")
## End(Not run)
Downloads Country Specific MSDs
pano_extract_msd( operatingunit = NULL, version, fiscal_year, quarter, level = c("psnu", "ou", "site", "nat"), dest_path, username, password, baseurl = "https://pepfar-panorama.org" )
operatingunit | PEPFAR Operating Unit. Default is set to NULL to return global datasets
version | Data release version: "initial" or "clean", defaults to the current version
fiscal_year | Reporting fiscal year, defaults to the current fiscal year
quarter | Reporting quarter (single digit), defaults to the current quarter
level | Org level, options are "psnu" (default), "ou", "site", or "nat"
dest_path | Directory path to download file. Default set to 'si_path()'
username | Panorama username, recommend using 'glamr::pano_user()', which is the default if left blank
password | Panorama password, recommend using 'glamr::pano_pwd()', which is the default if left blank
baseurl | Panorama base url
## Not run: pano_extract_msd(operatingunit = "Zambia", level = "site") ## End(Not run)
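To pin a specific release rather than the current one, the documented version, fiscal_year, and quarter arguments can be set explicitly; the values below are illustrative:
## Not run:
library(grabr)
library(glamr)

# pin a specific release; fiscal year / quarter values are illustrative
pano_extract_msd(
  operatingunit = "Zambia",
  version = "clean",
  fiscal_year = 2023,
  quarter = 3,
  level = "psnu",
  dest_path = si_path()
)
## End(Not run)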
Downloads All Global + OU Specific MSDs
pano_extract_msds( operatingunit, add_global = TRUE, items = "mer", archive = FALSE, dest_path, username, password, baseurl = "https://pepfar-panorama.org" )
operatingunit | PEPFAR Operating Unit. Default is set to NULL for global datasets
add_global | Add global datasets to this extract? Default is TRUE
items | Panorama data set, default option is 'mer'
archive | Logical, should the old files be archived? Default is FALSE
dest_path | Directory path to download file. Default set to 'glamr::si_path()'
username | Panorama username, recommend using 'glamr::pano_user()'
password | Panorama password, recommend using 'glamr::pano_pwd()'
baseurl | Panorama base url, default = "https://pepfar-panorama.org"
## Not run:
dir_mer <- si_path()

pano_extract_msds(operatingunit = "Zambia",
                  archive = TRUE,
                  dest_path = dir_mer)
## End(Not run)
Extract data items from url
pano_items(page_url, username, password, session = NULL)
page_url | Current html page url
username | Username for PEPFAR Panorama account, recommend using 'pano_user()'
password | Password for PEPFAR Panorama account, recommend using 'pano_pwd()'
session | Login session, only used within other 'pano_extract_*()' functions
data items as data frame
## Not run:
library(grabr)

s <- pano_session("<my-pano-user>", "<my-password>")

url <- "https://pepfar-panorama.org/forms/downloads"

items <- pano_items(page_url = url, session = s)
## End(Not run)
Create an active session for PEPFAR Panorama
pano_session(username, password, baseurl = "https://pepfar-panorama.org")
username | Username for PEPFAR Panorama account, recommend using 'pano_user()'
password | Password for PEPFAR Panorama account, recommend using 'pano_pwd()'
baseurl | PEPFAR Panorama base url
login session
## Not run: library(grabr) s <- pano_session("<my-pano-user>", "<my-password>") ## End(Not run)
Pull Partner/Mechanism Info from DATIM
pull_mech(usaid_only = TRUE, ou_sel = NULL, folderpath_output = NULL)
usaid_only | Specify if only USAID mechanisms should be returned, default = TRUE
ou_sel | Option to specify an operating unit, default = NULL
folderpath_output | Provide the full path to the folder for saving
## Not run:
# pull mechanism/partner information
df <- pull_mech()
## End(Not run)
Extract DATIM Results and Targets (DATIM API Call)
pull_mer( ou_name = NULL, username, password, baseurl = "https://final.datim.org/", fy_pd = NULL, quarters_complete = NULL, folderpath_output = NULL )
ou_name | Operating Unit name, if mechanism is not specified
username | DATIM username
password | DATIM password, recommend using 'mypwd()'
baseurl | API base url, default = https://final.datim.org/
fy_pd | Fiscal year(s) to cover; defaults to the current FY if not provided
quarters_complete | No. of quarters completed through FY to determine weeks left in year
folderpath_output | Folder path to store DATIM output, default = NULL
## Not run:
# OU MER data
myuser <- "UserX"

mech_x_dta <- pull_mer(ou_name = "Namibia",
                       username = myuser,
                       password = mypwd(myuser))
## End(Not run)
Get S3 Buckets list
s3_buckets(access_key, secret_key, ...)
access_key | S3 access key ID
secret_key | S3 secret access key
... | Additional arguments passed to s3HTTP
S3 Buckets list as tibble
## Not run: s3_buckets() ## End(Not run)
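Credentials can also be passed explicitly rather than read from stored defaults; the placeholder keys below are illustrative:
## Not run:
library(grabr)

# pass AWS-style credentials directly
s3_buckets(
  access_key = "<aws-access-key-id>",
  secret_key = "<aws-secret-access-key>"
)
## End(Not run)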
Download S3 Objects
s3_download(bucket, object, filepath = NULL, access_key, secret_key, ...)
bucket | S3 bucket name
object | S3 object key (id)
filepath | Full path of destination file
access_key | S3 access key ID
secret_key | S3 secret access key
... | Additional aws.s3::save_object() options
file name
## Not run: s3_objects("sample-bucket") %>% filter(str_detect(key, "^HFR")) %>% pull(key) %>% first() %>% s3_download() ## End(Not run)
## Not run: s3_objects("sample-bucket") %>% filter(str_detect(key, "^HFR")) %>% pull(key) %>% first() %>% s3_download() ## End(Not run)
Read sheets from S3 Objects / Excel
s3_excel_sheets(bucket, object_key, access_key, secret_key)
bucket | S3 bucket name
object_key | S3 object key
access_key | S3 access key
secret_key | S3 secret key
Excel sheets as data frame
## Not run:
s3_objects(bucket = "sample-bucket", prefix = "ddc/xyz/ABC") %>%
  filter(str_detect(key, "^HFR")) %>%
  pull(key) %>%
  first() %>%
  s3_excel_sheets(bucket = "sample-bucket", object_key = .)
## End(Not run)
Identify S3 Object type
s3_object_type(object)
object | S3 object key
file type: text, csv, excel, json, python, shell
## Not run: s3_objects("sample-bucket") %>% filter(str_detect(key, "^HFR")) %>% pull(key) %>% first() %>% s3_object_type() ## End(Not run)
## Not run: s3_objects("sample-bucket") %>% filter(str_detect(key, "^HFR")) %>% pull(key) %>% first() %>% s3_object_type() ## End(Not run)
Get S3 Bucket objects list
s3_objects( bucket, prefix = NULL, n = 1000, unpack_keys = FALSE, access_key, secret_key, ... )
bucket | S3 bucket name
prefix | Limits response by key. Default set to NULL
n | Max number of records, default = 1000
unpack_keys | Separate key column? Default is FALSE
access_key | S3 access key ID
secret_key | S3 secret access key
... | Additional aws.s3::get_bucket_df() options
S3 Objects list as tibble
## Not run: s3_objects("sample-bucket") ## End(Not run)
## Not run: s3_objects("sample-bucket") ## End(Not run)
Read content of S3 Objects
s3_read_object(bucket, object, sheet = NULL, access_key, secret_key, ...)
bucket | S3 bucket name
object | S3 object key (id)
sheet | S3 Excel object sheet name / index
access_key | S3 access key ID
secret_key | S3 secret access key
... | Additional arguments passed to s3HTTP
df
## Not run: s3_objects("sample-bucket") %>% filter(str_detect(key, "^HFR")) %>% pull(key) %>% first() %>% s3_read_object(bucket = "sample-bucket", object_key = .) ## End(Not run)
## Not run: s3_objects("sample-bucket") %>% filter(str_detect(key, "^HFR")) %>% pull(key) %>% first() %>% s3_read_object(bucket = "sample-bucket", object_key = .) ## End(Not run)
Remove objects from S3 bucket
s3_remove(objects, bucket, access_key, secret_key, ...)
objects | S3 object keys (full path)
bucket | S3 bucket name
access_key | S3 access key
secret_key | S3 secret key
... | Additional aws.s3::delete_object() options
boolean
## Not run:
df_objects %>%
  pull(key) %>%
  first() %>%
  s3_remove(objects = ., bucket = "test-bkt")
## End(Not run)
Unpack Objects Key
s3_unpack_keys(df_objects, rmv_sysfiles = TRUE, rmv_hidden = TRUE)
df_objects | S3 objects as a data frame
rmv_sysfiles | Remove system objects (logs, git folders, etc.)?
rmv_hidden | Remove hidden objects (folders/files starting with a dot)?
S3 Cleaned Objects list as tibble
## Not run: s3_objects("sample-bucket") %>% s3_unpack_keys() ## End(Not run)
## Not run: s3_objects("sample-bucket") %>% s3_unpack_keys() ## End(Not run)
Upload file to S3 Bucket
s3_upload( filepath, bucket, prefix = NULL, object = NULL, access_key, secret_key, ... )
filepath | Source file path
bucket | S3 bucket name
prefix | S3 prefix (folder structure). Default set to NULL
object | Destination S3 object name (with file extension)
access_key | S3 access key
secret_key | S3 secret key
... | Additional aws.s3::put_object() options
boolean
## Not run: filepath %>% s3_upload(bucket = "test-bkt") ## End(Not run)
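A sketch using the documented prefix and object arguments to control the destination key; the paths below are illustrative:
## Not run:
library(grabr)

# upload under a folder-style prefix with an explicit destination name
s3_upload(
  filepath = "Data/hfr_submission.xlsx",
  bucket = "test-bkt",
  prefix = "ddc/uat/raw",
  object = "hfr_submission.xlsx"
)
## End(Not run)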
Check if a variable exists
var_exists(df, var)
df | Data frame to check against
var | Quoted variable of interest
Other utility:
get_baseurl()
## Not run: var_exists(df, "val") ## End(Not run)
This function provides streamlined API access to PDAP Wave data, the successor to DATIM Genie. PDAP Wave API simplifies the requests that previously needed to be made with DATIM and returns a dataset back that matches the MSD structure. Further documentation can be found at https://wave.test.pdap.pepfar.net/api/docs#/.
wave_process_query( request_body, folderpath_dwnld = "Data", psd_type = c("psnu_im", "ou_im", "site_im"), request_type = c("POST", "GET"), username, password )
request_body | Elements to pass into the PDAP Wave POST API
folderpath_dwnld | Where to download, default = "Data"
psd_type | Type of PEPFAR Structured Dataset: "psnu_im" (default), "ou_im", or "site_im"
request_type | API request type: "POST" (default) or "GET"
username | DATIM username, if blank looks for stored credentials ('glamr::datim_user()')
password | DATIM password, if blank looks for stored credentials ('glamr::datim_pwd()')
Users must pass their query filter in list form into request_body, which matches what you would previously do manually in Genie. You can proceed with either POST or GET requests to access PSNUxIM, OUxIM, and SitexIM data.
This function was adapted from code developed and shared by Derek Wood (GHSD/PRIME).
list of request and stored data in zip
## Not run:
library(tidyverse)
library(glamr)

# get country uid for API
cntry_uid <- pepfar_country_list %>%
  filter(country == "Tanzania") %>%
  pull(country_uid)

# establish parameters to pass into POST API
post_body <- list(
  daily_frozen = 'daily',
  fiscal_year = list(2023, 2024),
  funding_agency = list("USAID"),
  indicator = list("TX_CURR", "TX_ML", "TX_CURR_LAG2", "TX_NET_NEW", "TX_NEW",
                   "TX_RTT", "PMTCT_STAT", "PMTCT_STAT_POS", "PMTCT_ART"),
  uid_hierarchy_list = list(str_glue('-|-|{cntry_uid}')))

# run POST API
wave_process_query(post_body)

# load data
df_wave <- return_latest("Data") %>%
  read_psd()
## End(Not run)