cowidev.hosp#

cowidev.hosp.__main__#

cowidev.hosp.__main__.run_step(args)[source]#

cowidev.hosp._parser#

cowidev.hosp._parser._parse_args()[source]#

cowidev.hosp.countries#

cowidev.hosp.etl#

class cowidev.hosp.etl.HospETL[source]#

Bases: object

_build_metadata(metadata)[source]#

Build metadata dataframe (to be exported later to locations.csv).

_build_time_df(execution)[source]#

Build execution time dataframe.

_check_fields_df(df)[source]#

Check format of the data collected for a certain location.

_execution_summary(t0, modules_execution_results)[source]#

Print a summary from the execution (timings).

_extract_entity(module_name: str)[source]#

Execute the process to get the data for a certain location (country).

extract(modules: list, parallel: bool = False, n_jobs: int = -2, modules_skip: list = [])[source]#

Get the data for all locations.

  • Build preliminary dataframe with all locations data.

  • Build metadata dataframe with locations metadata (source url, source name, etc.)

extract_collect(parallel, n_jobs, modules)[source]#

Collects data for all countries.

extract_export_checkpoint(modules_execution_results)[source]#

Exports downloaded data and metadata.

extract_process()[source]#

Load checkpointed data.

load(df: DataFrame, output_path: str) → None[source]#
pipe_metadata(df)[source]#
pipe_per_million(df)[source]#
pipe_round_values(df)[source]#
run(parallel: bool, n_jobs: int, modules, modules_skip=[])[source]#
transform(df: DataFrame)[source]#
transform_meta(df_meta: DataFrame, df: DataFrame, locations_path: str)[source]#
cowidev.hosp.etl.run_etl(parallel: bool, n_jobs: int, modules: list, modules_skip: list = [])[source]#

cowidev.hosp.grapher#

cowidev.hosp.grapher._date_to_owid_year(df)[source]#
cowidev.hosp.grapher._owid_format(df)[source]#
cowidev.hosp.grapher.run_db_updater()[source]#
cowidev.hosp.grapher.run_grapheriser()[source]#