Merge pull request #87 from IDEMSInternational/feat/campaigns
Support for campaigns
geoo89 authored Jun 24, 2023
2 parents f696900 + 033db1d commit 69205eb
Showing 23 changed files with 637 additions and 90 deletions.
2 changes: 1 addition & 1 deletion main.py
@@ -47,7 +47,7 @@ def main():

tag_matcher = TagMatcher(args.tags)
ci_parser = ContentIndexParser(sheet_reader, args.datamodels, tag_matcher=tag_matcher)
output = ci_parser.parse_all_flows()
output = ci_parser.parse_all()
json.dump(output.render(), open(args.output, 'w'), indent=4)


1 change: 1 addition & 0 deletions parsers/common/cellparser.py
@@ -84,6 +84,7 @@ def parse_as_string(self, value, context={}, is_object=None):
# is not to be processed any further.
if value is None:
return ''
value = str(value)
if context is None or (not context and '{' not in value):
# This is a hacky optimization.
return value
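
The added str() call coerces non-string cell values (e.g. numeric spreadsheet cells) before further processing. A minimal sketch of the effect, not part of this diff, assuming parse_as_string is a method of the CellParser class defined in this module:

from parsers.common.cellparser import CellParser

parser = CellParser()
# A numeric cell and its text equivalent now parse to the same string.
assert parser.parse_as_string(15) == '15'
assert parser.parse_as_string('15') == '15'
assert parser.parse_as_string(None) == ''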
1 change: 1 addition & 0 deletions parsers/common/sheetparser.py
@@ -1,6 +1,7 @@
import copy
from .rowdatasheet import RowDataSheet
from logger.logger import get_logger, logging_context
import pydantic

LOGGER = get_logger()

33 changes: 33 additions & 0 deletions parsers/creation/campaigneventrowmodel.py
@@ -0,0 +1,33 @@
from parsers.common.rowparser import ParserModel
from pydantic import validator
from typing import List

class CampaignEventRowModel(ParserModel):
uuid: str = ''
offset: str
unit: str
event_type: str
delivery_hour: str = ''
message: str = ''
relative_to: str
start_mode: str
flow: str = ''
base_language: str = ''

@validator('unit')
def validate_unit(cls, v):
if v not in ['M', 'H', 'D', 'W']:
raise ValueError('unit must be M (minute), H (hour), D (day) or W (week)')
return v

@validator('start_mode')
def validate_start_mode(cls, v):
if v not in ['I', 'S', 'P']:
raise ValueError("start_mode must be I (interrupt current flow), S (skip event if in flow) or P (send message and don't affect flow)")
return v

@validator('event_type')
def validate_event_type(cls, v):
if v not in ['M', 'F']:
raise ValueError('event_type must be F (flow) or M (message)')
return v
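
For reference, a minimal sketch (not part of this diff) of how these validators behave when a row is instantiated, assuming ParserModel is a standard pydantic model so that validator errors surface as pydantic.ValidationError:

from pydantic import ValidationError
from parsers.creation.campaigneventrowmodel import CampaignEventRowModel

# A valid flow event: 15 hours after "Created On", interrupting any current flow.
row = CampaignEventRowModel(
    offset='15', unit='H', event_type='F',
    relative_to='Created On', start_mode='I', flow='my_basic_flow')

# An unsupported unit is rejected by the 'unit' validator.
try:
    CampaignEventRowModel(
        offset='15', unit='X', event_type='F',
        relative_to='Created On', start_mode='I')
except ValidationError as error:
    print(error)  # unit must be M (minute), H (hour), D (day) or W (week)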
35 changes: 35 additions & 0 deletions parsers/creation/campaignparser.py
@@ -0,0 +1,35 @@
import importlib
from collections import OrderedDict
from .campaigneventrowmodel import CampaignEventRowModel
from rapidpro.models.campaigns import Campaign, CampaignEvent

from logger.logger import get_logger, logging_context

LOGGER = get_logger()


class CampaignParser:
def __init__(self, name, group_name, rows):
self.campaign = Campaign(name, group_name=group_name)
self.rows = rows

def parse(self):
for row_idx, row in enumerate(self.rows):
with logging_context(f"row {row_idx+2}"):
message = None
base_language = None
if row.message:
message = {"eng" : row.message}
base_language = row.base_language or "eng"
delivery_hour = -1
if row.delivery_hour:
delivery_hour = int(row.delivery_hour)
try:
event = CampaignEvent(
int(row.offset), row.unit, row.event_type, delivery_hour, row.start_mode,
relative_to_label=row.relative_to, flow_name=row.flow or None,
message=message, base_language=base_language)
self.campaign.add_event(event)
except ValueError as e:
LOGGER.critical(str(e))
return self.campaign
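
A hedged sketch of how CampaignParser composes with the row-parsing machinery; the call pattern mirrors parse_all_campaigns in contentindexparser.py below, and the RowParser import path is inferred from the ParserModel import in campaigneventrowmodel.py:

from parsers.common.rowparser import RowParser
from parsers.common.cellparser import CellParser
from parsers.common.sheetparser import SheetParser
from parsers.creation.campaigneventrowmodel import CampaignEventRowModel
from parsers.creation.campaignparser import CampaignParser

def campaign_from_sheet(sheet, name, group_name):
    # Parse each sheet row into a CampaignEventRowModel, then build the campaign.
    row_parser = RowParser(CampaignEventRowModel, CellParser())
    rows = SheetParser(row_parser, sheet).parse_all()
    return CampaignParser(name, group_name, rows).parse()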
30 changes: 26 additions & 4 deletions parsers/creation/contentindexparser.py
@@ -7,6 +7,8 @@
from parsers.creation.tagmatcher import TagMatcher
from rapidpro.models.containers import RapidProContainer
from parsers.creation.flowparser import FlowParser
from parsers.creation.campaignparser import CampaignParser
from parsers.creation.campaigneventrowmodel import CampaignEventRowModel
from logger.logger import get_logger, logging_context

LOGGER = get_logger()
@@ -26,6 +28,7 @@ def __init__(self, sheet_reader, user_data_model_module_name=None, tag_matcher=
self.template_sheets = {} # values: tablib tables
self.data_sheets = {} # values: OrderedDicts of RowModels
self.flow_definition_rows = [] # list of ContentIndexRowModel
self.campaign_definition_rows = [] # list of ContentIndexRowModel
if user_data_model_module_name:
self.user_models_module = importlib.import_module(user_data_model_module_name)
main_sheet = self.sheet_reader.get_main_sheet()
@@ -63,6 +66,10 @@ def process_content_index_table(self, content_index_table, content_index_name):
self.template_sheets[sheet_name] = TemplateSheet(sheet, row.template_argument_definitions)
if row.type == 'create_flow':
self.flow_definition_rows.append((logging_prefix, row))
elif row.type == 'create_campaign':
if not len(row.sheet_name) == 1:
LOGGER.critical('For create_campaign rows, exactly one sheet_name has to be specified')
self.campaign_definition_rows.append((logging_prefix, row))
else:
LOGGER.error(f'invalid type: "{row.type}"')

@@ -104,16 +111,32 @@ def get_template_sheet(self, name):
return self.template_sheets[name]

def get_node_group(self, template_name, data_sheet, data_row_id, template_arguments):
# TODO: Factor out logic duplication between this function and parse_all_flows.
if (data_sheet and data_row_id) or (not data_sheet and not data_row_id):
flow_name = template_name
with logging_context(f'{template_name}'):
return self.parse_flow(template_name, data_sheet, data_row_id, template_arguments, RapidProContainer(), parse_as_block=True)
else:
LOGGER.critical(f'For insert_as_block, either both data_sheet and data_row_id or neither have to be provided.')

def parse_all_flows(self):
def parse_all(self):
rapidpro_container = RapidProContainer()
self.parse_all_flows(rapidpro_container)
self.parse_all_campaigns(rapidpro_container)
return rapidpro_container

def parse_all_campaigns(self, rapidpro_container):
for logging_prefix, row in self.campaign_definition_rows:
sheet_name = row.sheet_name[0]
with logging_context(f'{logging_prefix} | {sheet_name}'):
sheet = self.sheet_reader.get_sheet(sheet_name)
row_parser = RowParser(CampaignEventRowModel, CellParser())
sheet_parser = SheetParser(row_parser, sheet)
rows = sheet_parser.parse_all()
campaign_parser = CampaignParser(row.new_name or sheet_name, row.group, rows)
campaign = campaign_parser.parse()
rapidpro_container.add_campaign(campaign)

def parse_all_flows(self, rapidpro_container):
for logging_prefix, row in self.flow_definition_rows:
with logging_context(f'{logging_prefix} | {row.sheet_name[0]}'):
if row.data_sheet and not row.data_row_id:
@@ -124,8 +147,7 @@ def parse_all_flows(self):
elif not row.data_sheet and row.data_row_id:
LOGGER.critical(f'For create_flow, if data_row_id is provided, data_sheet must also be provided.')
else:
self.parse_flow(row.sheet_name[0], row.data_sheet, row.data_row_id, row.template_arguments, rapidpro_container, row.new_name)
return rapidpro_container
self.parse_flow(row.sheet_name[0], row.data_sheet, row.data_row_id, row.template_arguments, rapidpro_container, row.new_name)

def parse_flow(self, sheet_name, data_sheet, data_row_id, template_arguments, rapidpro_container, new_name='', parse_as_block=False):
base_name = new_name or sheet_name
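
With parse_all() in place, callers build flows and campaigns into a single RapidProContainer and render it, as the main.py change above does. A minimal end-to-end sketch, assuming a sheet reader has already been constructed (its class is not shown in this diff):

import json
from parsers.creation.contentindexparser import ContentIndexParser

def export(sheet_reader, output_path):
    ci_parser = ContentIndexParser(sheet_reader)
    container = ci_parser.parse_all()  # flows and campaigns in one container
    with open(output_path, 'w') as f:
        json.dump(container.render(), f, indent=4)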
1 change: 1 addition & 0 deletions parsers/creation/contentindexrowmodel.py
@@ -15,6 +15,7 @@ class ContentIndexRowModel(ParserModel):
template_argument_definitions: List[TemplateArgument] = [] # internal name
template_arguments: list = []
data_model: str = ''
group: str = ''
status: str = ''
tags: List[str] = []

80 changes: 68 additions & 12 deletions parsers/creation/tests/test_contentindexparser.py
@@ -157,15 +157,15 @@ def test_generate_flows(self):

sheet_reader = MockSheetReader(ci_sheet, sheet_dict)
ci_parser = ContentIndexParser(sheet_reader, 'parsers.creation.tests.datarowmodels.nestedmodel')
container = ci_parser.parse_all_flows()
container = ci_parser.parse_all()
render_output = container.render()
self.compare_messages(render_output, 'my_basic_flow', ['Some text'])
self.compare_messages(render_output, 'my_template - row1', ['Value1', 'Happy1 and Sad1'])
self.compare_messages(render_output, 'my_template - row2', ['Value2', 'Happy2 and Sad2'])

sheet_reader = MockSheetReader(ci_sheet_alt, sheet_dict)
ci_parser = ContentIndexParser(sheet_reader, 'parsers.creation.tests.datarowmodels.nestedmodel')
container = ci_parser.parse_all_flows()
container = ci_parser.parse_all()
render_output = container.render()
self.compare_messages(render_output, 'my_basic_flow', ['Some text'])
self.compare_messages(render_output, 'my_template - row1', ['Value1', 'Happy1 and Sad1'])
@@ -196,7 +196,7 @@ def test_bulk_flows_with_args(self):

sheet_reader = MockSheetReader(ci_sheet, sheet_dict)
ci_parser = ContentIndexParser(sheet_reader, 'parsers.creation.tests.datarowmodels.nestedmodel')
container = ci_parser.parse_all_flows()
container = ci_parser.parse_all()
render_output = container.render()
self.compare_messages(render_output, 'my_renamed_template - row1', ['Value1 ARG1 ARG2', 'Happy1 and Sad1'])
self.compare_messages(render_output, 'my_renamed_template - row2', ['Value2 ARG1 ARG2', 'Happy2 and Sad2'])
@@ -239,7 +239,7 @@ def test_insert_as_block(self):

sheet_reader = MockSheetReader(ci_sheet, sheet_dict)
ci_parser = ContentIndexParser(sheet_reader, 'parsers.creation.tests.datarowmodels.nestedmodel')
container = ci_parser.parse_all_flows()
container = ci_parser.parse_all()
render_output = container.render()
messages_exp = [
'Some text',
@@ -298,7 +298,7 @@ def test_insert_as_block_with_sheet_arguments(self):

sheet_reader = MockSheetReader(ci_sheet, sheet_dict)
ci_parser = ContentIndexParser(sheet_reader, 'parsers.creation.tests.datarowmodels.listmodel')
container = ci_parser.parse_all_flows()
container = ci_parser.parse_all()
render_output = container.render()
messages_exp = [
'Some text',
@@ -345,7 +345,7 @@ def test_insert_as_block_with_arguments(self):

sheet_reader = MockSheetReader(ci_sheet, sheet_dict)
ci_parser = ContentIndexParser(sheet_reader, 'parsers.creation.tests.datarowmodels.listmodel')
container = ci_parser.parse_all_flows()
container = ci_parser.parse_all()
render_output = container.render()
messages_exp = [
'value1 default2',
@@ -387,7 +387,7 @@ def test_eval(self):

sheet_reader = MockSheetReader(ci_sheet, sheet_dict)
ci_parser = ContentIndexParser(sheet_reader, 'parsers.creation.tests.datarowmodels.evalmodels')
container = ci_parser.parse_all_flows()
container = ci_parser.parse_all()
render_output = container.render()
messages_exp = [
'hello', 'yes',
@@ -422,7 +422,7 @@ def test_tags(self):

sheet_reader = MockSheetReader(ci_sheet, sheet_dict)
ci_parser = ContentIndexParser(sheet_reader, 'parsers.creation.tests.datarowmodels.evalmodels')
container = ci_parser.parse_all_flows()
container = ci_parser.parse_all()
render_output = container.render()
self.assertEqual(self.get_flow_names(render_output), {"flow-world", "flow-t1", "flow-b1", "flow-t2", "flow-b2", "flow-t1t2", "flow-t1b2", "flow-b1t2"})
self.compare_messages(render_output, 'flow-world', ['Hello World'])
@@ -436,25 +436,81 @@ def test_tags(self):

tag_matcher = TagMatcher(["1", "tag1"])
ci_parser = ContentIndexParser(sheet_reader, 'parsers.creation.tests.datarowmodels.evalmodels', tag_matcher)
container = ci_parser.parse_all_flows()
container = ci_parser.parse_all()
render_output = container.render()
self.assertEqual(self.get_flow_names(render_output), {"flow-world", "flow-t1", "flow-t2", "flow-b2", "flow-t1t2", "flow-t1b2"})

tag_matcher = TagMatcher(["1", "tag1", "bag1"])
ci_parser = ContentIndexParser(sheet_reader, 'parsers.creation.tests.datarowmodels.evalmodels', tag_matcher)
container = ci_parser.parse_all_flows()
container = ci_parser.parse_all()
render_output = container.render()
self.assertEqual(self.get_flow_names(render_output), {"flow-world", "flow-t1", "flow-b1", "flow-t2", "flow-b2", "flow-t1t2", "flow-t1b2", "flow-b1t2"})

tag_matcher = TagMatcher(["1", "tag1", "2", "tag2"])
ci_parser = ContentIndexParser(sheet_reader, 'parsers.creation.tests.datarowmodels.evalmodels', tag_matcher)
container = ci_parser.parse_all_flows()
container = ci_parser.parse_all()
render_output = container.render()
self.assertEqual(self.get_flow_names(render_output), {"flow-world", "flow-t1","flow-t2","flow-t1t2"})

tag_matcher = TagMatcher(["5", "tag1", "bag1"])
ci_parser = ContentIndexParser(sheet_reader, 'parsers.creation.tests.datarowmodels.evalmodels', tag_matcher)
container = ci_parser.parse_all_flows()
container = ci_parser.parse_all()
render_output = container.render()
self.assertEqual(self.get_flow_names(render_output), {"flow-world", "flow-t1", "flow-b1", "flow-t2", "flow-b2", "flow-t1t2", "flow-t1b2", "flow-b1t2"})


class TestParseCampaigns(unittest.TestCase):

def test_parse_flow_campaign(self):
ci_sheet = (
'type,sheet_name,new_name,group\n'
'create_campaign,my_campaign,renamed_campaign,My Group\n'
'create_flow,my_basic_flow,,\n'
)
my_campaign = (
'offset,unit,event_type,delivery_hour,message,relative_to,start_mode,flow\n'
'15,H,F,,,Created On,I,my_basic_flow\n'
)
my_basic_flow = (
'row_id,type,from,message_text\n'
',send_message,start,Some text\n'
)

sheet_reader = MockSheetReader(ci_sheet, {'my_campaign' : my_campaign, 'my_basic_flow' : my_basic_flow})
ci_parser = ContentIndexParser(sheet_reader)
container = ci_parser.parse_all()
render_output = container.render()
self.assertEqual(render_output["campaigns"][0]["name"], "renamed_campaign")
self.assertEqual(render_output["campaigns"][0]["group"]["name"], "My Group")
event = render_output["campaigns"][0]["events"][0]
self.assertEqual(event["offset"], 15)
self.assertEqual(event["unit"], 'H')
self.assertEqual(event["event_type"], 'F')
self.assertEqual(event["delivery_hour"], -1)
self.assertEqual(event["message"], None)
self.assertEqual(event["relative_to"], {'label' : 'Created On', 'key' : 'created_on'})
self.assertEqual(event["start_mode"], 'I')
self.assertEqual(event["flow"]["name"], 'my_basic_flow')
self.assertEqual(event["flow"]["uuid"], render_output["flows"][0]["uuid"])
self.assertIsNone(event.get('base_language'))

def test_parse_message_campaign(self):
ci_sheet = (
'type,sheet_name,new_name,group\n'
'create_campaign,my_campaign,,My Group\n'
)
my_campaign = (
'offset,unit,event_type,delivery_hour,message,relative_to,start_mode,flow\n'
'150,D,M,12,Messagetext,Created On,I,\n'
)

sheet_reader = MockSheetReader(ci_sheet, {'my_campaign' : my_campaign})
ci_parser = ContentIndexParser(sheet_reader)
container = ci_parser.parse_all()
render_output = container.render()
self.assertEqual(render_output["campaigns"][0]["name"], "my_campaign")
event = render_output["campaigns"][0]["events"][0]
self.assertEqual(event["event_type"], 'M')
self.assertEqual(event["delivery_hour"], 12)
self.assertEqual(event["message"], {'eng': 'Messagetext'})
self.assertEqual(event["base_language"], 'eng')