diff --git a/docs/database.dbml b/docs/database.dbml index 3e8fb08..dc96216 100644 --- a/docs/database.dbml +++ b/docs/database.dbml @@ -1,34 +1,14 @@ -Enum "approval_status" { - "Pending" - "Approved" - "Rejected" +Table "approval_status" { + "status" "character varying" [pk, not null] } -Enum "bna_region" { - "Mid-Atlantic" - "Midwest" - "Mountain" - "New England" - "Pacific" - "South" -} - -Enum "brokenspoke_status" { - "pending" - "started" - "complete" -} - -Enum "brokenspoke_step" { - "sqs_message" - "setup" - "analysis" - "cleanup" +Table "bna_region" { + "name" "character varying" [pk, not null] } Table "brokenspoke_pipeline" { "state_machine_id" uuid [pk, not null] - "step" brokenspoke_step + "step" "character varying" "sqs_message" json "fargate_task_arn" "character varying" "s3_bucket" "character varying" @@ -39,8 +19,16 @@ Table "brokenspoke_pipeline" { "cost" numeric } +Table "brokenspoke_status" { + "status" "character varying" [pk, not null] +} + +Table "brokenspoke_step" { + "step" "character varying" [pk, not null] +} + Table "census" { - "census_id" integer [pk, not null] + "id" integer [pk, not null] "city_id" uuid [not null] "created_at" timestamp [not null, default: `CURRENT_TIMESTAMP`] "fips_code" "character varying" [not null] @@ -48,13 +36,13 @@ Table "census" { "population" integer [not null] Indexes { - census_id [type: btree, name: "census_census_id_idx"] city_id [type: btree, name: "census_city_id_idx"] + id [type: btree, name: "census_id_idx"] } } Table "city" { - "city_id" uuid [unique, not null] + "id" uuid [unique, not null] "country" "character varying" [not null] "state" "character varying" [not null] "name" "character varying" [not null] @@ -68,12 +56,12 @@ Table "city" { Indexes { (country, state, name) [pk, name: "city_pkey"] - city_id [type: btree, name: "city_city_id_idx"] + id [type: btree, name: "city_id_idx"] } } Table "core_services" { - "bna_uuid" uuid [pk, not null] + "bna_id" uuid [pk, not null] "dentists" 
doubleprecision "doctors" doubleprecision "grocery" doubleprecision @@ -84,8 +72,7 @@ Table "core_services" { } Table "country" { - "country_id" integer [pk, not null] - "name" "character varying" [unique, not null] + "name" "character varying" [pk, not null] } Table "fargate_price" { @@ -95,20 +82,20 @@ Table "fargate_price" { } Table "features" { - "bna_uuid" uuid [pk, not null] + "bna_id" uuid [pk, not null] "people" doubleprecision "retail" doubleprecision "transit" doubleprecision } Table "infrastructure" { - "bna_uuid" uuid [pk, not null] + "bna_id" uuid [pk, not null] "low_stress_miles" doubleprecision "high_stress_miles" doubleprecision } Table "opportunity" { - "bna_uuid" uuid [pk, not null] + "bna_id" uuid [pk, not null] "employment" doubleprecision "higher_education" doubleprecision "k12_education" doubleprecision @@ -117,7 +104,7 @@ Table "opportunity" { } Table "recreation" { - "bna_uuid" uuid [pk, not null] + "bna_id" uuid [pk, not null] "community_centers" doubleprecision "parks" doubleprecision "recreation_trails" doubleprecision @@ -130,35 +117,33 @@ Table "seaql_migrations" { } Table "speed_limit" { - "speed_limit_id" integer [pk, not null] + "id" integer [pk, not null] "city_id" uuid [not null] "created_at" timestamp [not null, default: `CURRENT_TIMESTAMP`] "residential" integer [not null] Indexes { city_id [type: btree, name: "speed_limit_city_id_idx"] - speed_limit_id [type: btree, name: "speed_limit_speed_limit_id_idx"] + id [type: btree, name: "speed_limit_id_idx"] } } Table "state_region_crosswalk" { - "state" "character varying" [pk, not null] - "region" bna_region [not null] -} + "state" "character varying" [not null] + "region" "character varying" [not null] -Table "state_speed_limit" { - "state_abbrev" "character (2)" [pk, not null] - "state_fips_code" "character (2)" [not null] - "speed" integer [not null] - "created_at" timestamp [not null, default: `CURRENT_TIMESTAMP`] - "updated_at" timestamp + Indexes { + (state, region) [pk, name: 
"state_region_crosswalk_pkey"] + region [type: btree, name: "state_region_crosswalk_region_idx"] + state [type: btree, name: "state_region_crosswalk_state_idx"] + } } Table "submission" { "id" integer [pk, not null] "first_name" "character varying" [not null] "last_name" "character varying" [not null] - "title" "character varying" + "occupation" "character varying" "organization" "character varying" "email" "character varying" [not null] "country" "character varying" [not null] @@ -166,30 +151,52 @@ Table "submission" { "region" "character varying" "fips_code" "character varying" [not null, default: `'0'::charactervarying`] "consent" boolean [not null] - "status" approval_status [not null, default: `'Pending'::public.approval_status`] + "status" "character varying" [not null] "created_at" timestamp [not null, default: `CURRENT_TIMESTAMP`] } Table "summary" { - "bna_uuid" uuid [pk, not null] + "bna_id" uuid [pk, not null] "city_id" uuid [not null] "created_at" timestamp [not null, default: `CURRENT_TIMESTAMP`] "score" doubleprecision [not null] "version" "character varying" [not null] } -Ref "census_city_id_fkey":"city"."city_id" < "census"."city_id" [delete: cascade] +Table "us_state" { + "name" "character varying" [pk, not null] + "abbrev" "character varying" [unique, not null] + "fips_code" "character (2)" [unique, not null] + "speed_limit" integer [not null] + + Indexes { + abbrev [type: btree, name: "us_state_abbrev_idx"] + fips_code [type: btree, name: "us_state_fips_code_idx"] + } +} + +Ref "census_city_id_fkey":"city"."id" < "census"."city_id" [delete: cascade] + +Ref "city_country_fkey":"country"."name" < "city"."country" + +Ref "core_services_bna_id_fkey":"summary"."bna_id" < "core_services"."bna_id" [delete: cascade] + +Ref "features_bna_id_fkey":"summary"."bna_id" < "features"."bna_id" [delete: cascade] + +Ref "infrastructure_bna_id_fkey":"summary"."bna_id" < "infrastructure"."bna_id" [delete: cascade] + +Ref "opportunity_bna_id_fkey":"summary"."bna_id" 
< "opportunity"."bna_id" [delete: cascade] -Ref "core_services_bna_uuid_fkey":"summary"."bna_uuid" < "core_services"."bna_uuid" [delete: cascade] +Ref "recreation_bna_id_fkey":"summary"."bna_id" < "recreation"."bna_id" [delete: cascade] -Ref "features_bna_uuid_fkey":"summary"."bna_uuid" < "features"."bna_uuid" [delete: cascade] +Ref "speed_limit_city_id_fkey":"city"."id" < "speed_limit"."city_id" [delete: cascade] -Ref "infrastructure_bna_uuid_fkey":"summary"."bna_uuid" < "infrastructure"."bna_uuid" [delete: cascade] +Ref "state_region_crosswalk_region_fkey":"bna_region"."name" < "state_region_crosswalk"."region" -Ref "opportunity_bna_uuid_fkey":"summary"."bna_uuid" < "opportunity"."bna_uuid" [delete: cascade] +Ref "state_region_crosswalk_state_fkey":"us_state"."name" < "state_region_crosswalk"."state" -Ref "recreation_bna_uuid_fkey":"summary"."bna_uuid" < "recreation"."bna_uuid" [delete: cascade] +Ref "submission_country_fkey":"country"."name" < "submission"."country" -Ref "speed_limit_city_id_fkey":"city"."city_id" < "speed_limit"."city_id" [delete: cascade] +Ref "submission_status_fkey":"approval_status"."status" < "submission"."status" -Ref "summary_city_id_fkey":"city"."city_id" < "summary"."city_id" [delete: cascade] +Ref "summary_city_id_fkey":"city"."id" < "summary"."city_id" [delete: cascade] diff --git a/docs/database.sql b/docs/database.sql index b611318..bb0657a 100644 --- a/docs/database.sql +++ b/docs/database.sql @@ -3,7 +3,7 @@ -- -- Dumped from database version 15.2 (Debian 15.2-1.pgdg110+1) --- Dumped by pg_dump version 15.7 (Homebrew) +-- Dumped by pg_dump version 15.8 (Homebrew) SET statement_timeout = 0; SET lock_timeout = 0; @@ -16,65 +16,31 @@ SET xmloption = content; SET client_min_messages = warning; SET row_security = off; --- --- Name: approval_status; Type: TYPE; Schema: public; Owner: postgres --- - -CREATE TYPE public.approval_status AS ENUM ( - 'Pending', - 'Approved', - 'Rejected' -); - - -ALTER TYPE public.approval_status OWNER TO 
postgres; - --- --- Name: bna_region; Type: TYPE; Schema: public; Owner: postgres --- - -CREATE TYPE public.bna_region AS ENUM ( - 'Mid-Atlantic', - 'Midwest', - 'Mountain', - 'New England', - 'Pacific', - 'South' -); - +SET default_tablespace = ''; -ALTER TYPE public.bna_region OWNER TO postgres; +SET default_table_access_method = heap; -- --- Name: brokenspoke_status; Type: TYPE; Schema: public; Owner: postgres +-- Name: approval_status; Type: TABLE; Schema: public; Owner: postgres -- -CREATE TYPE public.brokenspoke_status AS ENUM ( - 'pending', - 'started', - 'complete' +CREATE TABLE public.approval_status ( + status character varying NOT NULL ); -ALTER TYPE public.brokenspoke_status OWNER TO postgres; +ALTER TABLE public.approval_status OWNER TO postgres; -- --- Name: brokenspoke_step; Type: TYPE; Schema: public; Owner: postgres +-- Name: bna_region; Type: TABLE; Schema: public; Owner: postgres -- -CREATE TYPE public.brokenspoke_step AS ENUM ( - 'sqs_message', - 'setup', - 'analysis', - 'cleanup' +CREATE TABLE public.bna_region ( + name character varying NOT NULL ); -ALTER TYPE public.brokenspoke_step OWNER TO postgres; - -SET default_tablespace = ''; - -SET default_table_access_method = heap; +ALTER TABLE public.bna_region OWNER TO postgres; -- -- Name: brokenspoke_pipeline; Type: TABLE; Schema: public; Owner: postgres @@ -82,7 +48,7 @@ SET default_table_access_method = heap; CREATE TABLE public.brokenspoke_pipeline ( state_machine_id uuid NOT NULL, - step public.brokenspoke_step, + step character varying, sqs_message json, fargate_task_arn character varying, s3_bucket character varying, @@ -96,12 +62,34 @@ CREATE TABLE public.brokenspoke_pipeline ( ALTER TABLE public.brokenspoke_pipeline OWNER TO postgres; +-- +-- Name: brokenspoke_status; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.brokenspoke_status ( + status character varying NOT NULL +); + + +ALTER TABLE public.brokenspoke_status OWNER TO postgres; + +-- +-- Name: 
brokenspoke_step; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.brokenspoke_step ( + step character varying NOT NULL +); + + +ALTER TABLE public.brokenspoke_step OWNER TO postgres; + -- -- Name: census; Type: TABLE; Schema: public; Owner: postgres -- CREATE TABLE public.census ( - census_id integer NOT NULL, + id integer NOT NULL, city_id uuid NOT NULL, created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, fips_code character varying NOT NULL, @@ -113,10 +101,10 @@ CREATE TABLE public.census ( ALTER TABLE public.census OWNER TO postgres; -- --- Name: census_census_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- Name: census_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres -- -CREATE SEQUENCE public.census_census_id_seq +CREATE SEQUENCE public.census_id_seq AS integer START WITH 1 INCREMENT BY 1 @@ -125,13 +113,13 @@ CREATE SEQUENCE public.census_census_id_seq CACHE 1; -ALTER TABLE public.census_census_id_seq OWNER TO postgres; +ALTER TABLE public.census_id_seq OWNER TO postgres; -- --- Name: census_census_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- Name: census_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres -- -ALTER SEQUENCE public.census_census_id_seq OWNED BY public.census.census_id; +ALTER SEQUENCE public.census_id_seq OWNED BY public.census.id; -- @@ -139,7 +127,7 @@ ALTER SEQUENCE public.census_census_id_seq OWNED BY public.census.census_id; -- CREATE TABLE public.city ( - city_id uuid NOT NULL, + id uuid NOT NULL, country character varying NOT NULL, state character varying NOT NULL, name character varying NOT NULL, @@ -160,7 +148,7 @@ ALTER TABLE public.city OWNER TO postgres; -- CREATE TABLE public.core_services ( - bna_uuid uuid NOT NULL, + bna_id uuid NOT NULL, dentists double precision, doctors double precision, grocery double precision, @@ -178,35 +166,12 @@ ALTER TABLE public.core_services OWNER TO postgres; -- CREATE TABLE public.country ( - 
country_id integer NOT NULL, name character varying NOT NULL ); ALTER TABLE public.country OWNER TO postgres; --- --- Name: country_country_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres --- - -CREATE SEQUENCE public.country_country_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - -ALTER TABLE public.country_country_id_seq OWNER TO postgres; - --- --- Name: country_country_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres --- - -ALTER SEQUENCE public.country_country_id_seq OWNED BY public.country.country_id; - - -- -- Name: fargate_price; Type: TABLE; Schema: public; Owner: postgres -- @@ -247,7 +212,7 @@ ALTER SEQUENCE public.fargate_price_id_seq OWNED BY public.fargate_price.id; -- CREATE TABLE public.features ( - bna_uuid uuid NOT NULL, + bna_id uuid NOT NULL, people double precision, retail double precision, transit double precision @@ -261,7 +226,7 @@ ALTER TABLE public.features OWNER TO postgres; -- CREATE TABLE public.infrastructure ( - bna_uuid uuid NOT NULL, + bna_id uuid NOT NULL, low_stress_miles double precision, high_stress_miles double precision ); @@ -274,7 +239,7 @@ ALTER TABLE public.infrastructure OWNER TO postgres; -- CREATE TABLE public.opportunity ( - bna_uuid uuid NOT NULL, + bna_id uuid NOT NULL, employment double precision, higher_education double precision, k12_education double precision, @@ -290,7 +255,7 @@ ALTER TABLE public.opportunity OWNER TO postgres; -- CREATE TABLE public.recreation ( - bna_uuid uuid NOT NULL, + bna_id uuid NOT NULL, community_centers double precision, parks double precision, recreation_trails double precision, @@ -317,7 +282,7 @@ ALTER TABLE public.seaql_migrations OWNER TO postgres; -- CREATE TABLE public.speed_limit ( - speed_limit_id integer NOT NULL, + id integer NOT NULL, city_id uuid NOT NULL, created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, residential integer NOT NULL @@ -327,10 +292,10 @@ CREATE TABLE 
public.speed_limit ( ALTER TABLE public.speed_limit OWNER TO postgres; -- --- Name: speed_limit_speed_limit_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- Name: speed_limit_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres -- -CREATE SEQUENCE public.speed_limit_speed_limit_id_seq +CREATE SEQUENCE public.speed_limit_id_seq AS integer START WITH 1 INCREMENT BY 1 @@ -339,13 +304,13 @@ CREATE SEQUENCE public.speed_limit_speed_limit_id_seq CACHE 1; -ALTER TABLE public.speed_limit_speed_limit_id_seq OWNER TO postgres; +ALTER TABLE public.speed_limit_id_seq OWNER TO postgres; -- --- Name: speed_limit_speed_limit_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- Name: speed_limit_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres -- -ALTER SEQUENCE public.speed_limit_speed_limit_id_seq OWNED BY public.speed_limit.speed_limit_id; +ALTER SEQUENCE public.speed_limit_id_seq OWNED BY public.speed_limit.id; -- @@ -354,27 +319,12 @@ ALTER SEQUENCE public.speed_limit_speed_limit_id_seq OWNED BY public.speed_limit CREATE TABLE public.state_region_crosswalk ( state character varying NOT NULL, - region public.bna_region NOT NULL + region character varying NOT NULL ); ALTER TABLE public.state_region_crosswalk OWNER TO postgres; --- --- Name: state_speed_limit; Type: TABLE; Schema: public; Owner: postgres --- - -CREATE TABLE public.state_speed_limit ( - state_abbrev character(2) NOT NULL, - state_fips_code character(2) NOT NULL, - speed integer NOT NULL, - created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, - updated_at timestamp with time zone -); - - -ALTER TABLE public.state_speed_limit OWNER TO postgres; - -- -- Name: submission; Type: TABLE; Schema: public; Owner: postgres -- @@ -383,7 +333,7 @@ CREATE TABLE public.submission ( id integer NOT NULL, first_name character varying NOT NULL, last_name character varying NOT NULL, - title character varying, + occupation character varying, organization character 
varying, email character varying NOT NULL, country character varying NOT NULL, @@ -391,7 +341,7 @@ CREATE TABLE public.submission ( region character varying, fips_code character varying DEFAULT '0'::character varying NOT NULL, consent boolean NOT NULL, - status public.approval_status DEFAULT 'Pending'::public.approval_status NOT NULL, + status character varying NOT NULL, created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL ); @@ -425,7 +375,7 @@ ALTER SEQUENCE public.submission_id_seq OWNED BY public.submission.id; -- CREATE TABLE public.summary ( - bna_uuid uuid NOT NULL, + bna_id uuid NOT NULL, city_id uuid NOT NULL, created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, score double precision NOT NULL, @@ -436,17 +386,24 @@ CREATE TABLE public.summary ( ALTER TABLE public.summary OWNER TO postgres; -- --- Name: census census_id; Type: DEFAULT; Schema: public; Owner: postgres +-- Name: us_state; Type: TABLE; Schema: public; Owner: postgres -- -ALTER TABLE ONLY public.census ALTER COLUMN census_id SET DEFAULT nextval('public.census_census_id_seq'::regclass); +CREATE TABLE public.us_state ( + name character varying NOT NULL, + abbrev character varying NOT NULL, + fips_code character(2) NOT NULL, + speed_limit integer NOT NULL +); +ALTER TABLE public.us_state OWNER TO postgres; + -- --- Name: country country_id; Type: DEFAULT; Schema: public; Owner: postgres +-- Name: census id; Type: DEFAULT; Schema: public; Owner: postgres -- -ALTER TABLE ONLY public.country ALTER COLUMN country_id SET DEFAULT nextval('public.country_country_id_seq'::regclass); +ALTER TABLE ONLY public.census ALTER COLUMN id SET DEFAULT nextval('public.census_id_seq'::regclass); -- @@ -457,10 +414,10 @@ ALTER TABLE ONLY public.fargate_price ALTER COLUMN id SET DEFAULT nextval('publi -- --- Name: speed_limit speed_limit_id; Type: DEFAULT; Schema: public; Owner: postgres +-- Name: speed_limit id; Type: DEFAULT; Schema: public; Owner: postgres -- -ALTER TABLE ONLY 
public.speed_limit ALTER COLUMN speed_limit_id SET DEFAULT nextval('public.speed_limit_speed_limit_id_seq'::regclass); +ALTER TABLE ONLY public.speed_limit ALTER COLUMN id SET DEFAULT nextval('public.speed_limit_id_seq'::regclass); -- @@ -470,6 +427,22 @@ ALTER TABLE ONLY public.speed_limit ALTER COLUMN speed_limit_id SET DEFAULT next ALTER TABLE ONLY public.submission ALTER COLUMN id SET DEFAULT nextval('public.submission_id_seq'::regclass); +-- +-- Name: approval_status approval_status_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.approval_status + ADD CONSTRAINT approval_status_pkey PRIMARY KEY (status); + + +-- +-- Name: bna_region bna_region_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.bna_region + ADD CONSTRAINT bna_region_pkey PRIMARY KEY (name); + + -- -- Name: brokenspoke_pipeline brokenspoke_pipeline_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres -- @@ -478,20 +451,36 @@ ALTER TABLE ONLY public.brokenspoke_pipeline ADD CONSTRAINT brokenspoke_pipeline_pkey PRIMARY KEY (state_machine_id); +-- +-- Name: brokenspoke_status brokenspoke_status_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.brokenspoke_status + ADD CONSTRAINT brokenspoke_status_pkey PRIMARY KEY (status); + + +-- +-- Name: brokenspoke_step brokenspoke_step_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.brokenspoke_step + ADD CONSTRAINT brokenspoke_step_pkey PRIMARY KEY (step); + + -- -- Name: census census_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres -- ALTER TABLE ONLY public.census - ADD CONSTRAINT census_pkey PRIMARY KEY (census_id); + ADD CONSTRAINT census_pkey PRIMARY KEY (id); -- --- Name: city city_city_id_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- Name: city city_id_key; Type: CONSTRAINT; Schema: public; Owner: postgres -- ALTER TABLE ONLY public.city - ADD CONSTRAINT city_city_id_key UNIQUE 
(city_id); + ADD CONSTRAINT city_id_key UNIQUE (id); -- @@ -507,15 +496,7 @@ ALTER TABLE ONLY public.city -- ALTER TABLE ONLY public.core_services - ADD CONSTRAINT core_services_pkey PRIMARY KEY (bna_uuid); - - --- --- Name: country country_name_key; Type: CONSTRAINT; Schema: public; Owner: postgres --- - -ALTER TABLE ONLY public.country - ADD CONSTRAINT country_name_key UNIQUE (name); + ADD CONSTRAINT core_services_pkey PRIMARY KEY (bna_id); -- @@ -523,7 +504,7 @@ ALTER TABLE ONLY public.country -- ALTER TABLE ONLY public.country - ADD CONSTRAINT country_pkey PRIMARY KEY (country_id); + ADD CONSTRAINT country_pkey PRIMARY KEY (name); -- @@ -539,7 +520,7 @@ ALTER TABLE ONLY public.fargate_price -- ALTER TABLE ONLY public.features - ADD CONSTRAINT features_pkey PRIMARY KEY (bna_uuid); + ADD CONSTRAINT features_pkey PRIMARY KEY (bna_id); -- @@ -547,7 +528,7 @@ ALTER TABLE ONLY public.features -- ALTER TABLE ONLY public.infrastructure - ADD CONSTRAINT infrastructure_pkey PRIMARY KEY (bna_uuid); + ADD CONSTRAINT infrastructure_pkey PRIMARY KEY (bna_id); -- @@ -555,7 +536,7 @@ ALTER TABLE ONLY public.infrastructure -- ALTER TABLE ONLY public.opportunity - ADD CONSTRAINT opportunity_pkey PRIMARY KEY (bna_uuid); + ADD CONSTRAINT opportunity_pkey PRIMARY KEY (bna_id); -- @@ -563,7 +544,7 @@ ALTER TABLE ONLY public.opportunity -- ALTER TABLE ONLY public.recreation - ADD CONSTRAINT recreation_pkey PRIMARY KEY (bna_uuid); + ADD CONSTRAINT recreation_pkey PRIMARY KEY (bna_id); -- @@ -579,7 +560,7 @@ ALTER TABLE ONLY public.seaql_migrations -- ALTER TABLE ONLY public.speed_limit - ADD CONSTRAINT speed_limit_pkey PRIMARY KEY (speed_limit_id); + ADD CONSTRAINT speed_limit_pkey PRIMARY KEY (id); -- @@ -587,15 +568,7 @@ ALTER TABLE ONLY public.speed_limit -- ALTER TABLE ONLY public.state_region_crosswalk - ADD CONSTRAINT state_region_crosswalk_pkey PRIMARY KEY (state); - - --- --- Name: state_speed_limit state_speed_limit_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- 
- -ALTER TABLE ONLY public.state_speed_limit - ADD CONSTRAINT state_speed_limit_pkey PRIMARY KEY (state_abbrev); + ADD CONSTRAINT state_region_crosswalk_pkey PRIMARY KEY (state, region); -- @@ -611,14 +584,31 @@ ALTER TABLE ONLY public.submission -- ALTER TABLE ONLY public.summary - ADD CONSTRAINT summary_pkey PRIMARY KEY (bna_uuid); + ADD CONSTRAINT summary_pkey PRIMARY KEY (bna_id); + + +-- +-- Name: us_state us_state_abbrev_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.us_state + ADD CONSTRAINT us_state_abbrev_key UNIQUE (abbrev); -- --- Name: census_census_id_idx; Type: INDEX; Schema: public; Owner: postgres +-- Name: us_state us_state_fips_code_key; Type: CONSTRAINT; Schema: public; Owner: postgres -- -CREATE INDEX census_census_id_idx ON public.census USING btree (census_id); +ALTER TABLE ONLY public.us_state + ADD CONSTRAINT us_state_fips_code_key UNIQUE (fips_code); + + +-- +-- Name: us_state us_state_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.us_state + ADD CONSTRAINT us_state_pkey PRIMARY KEY (name); -- @@ -629,10 +619,17 @@ CREATE INDEX census_city_id_idx ON public.census USING btree (city_id); -- --- Name: city_city_id_idx; Type: INDEX; Schema: public; Owner: postgres +-- Name: census_id_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX census_id_idx ON public.census USING btree (id); + + +-- +-- Name: city_id_idx; Type: INDEX; Schema: public; Owner: postgres -- -CREATE INDEX city_city_id_idx ON public.city USING btree (city_id); +CREATE INDEX city_id_idx ON public.city USING btree (id); -- @@ -643,10 +640,38 @@ CREATE INDEX speed_limit_city_id_idx ON public.speed_limit USING btree (city_id) -- --- Name: speed_limit_speed_limit_id_idx; Type: INDEX; Schema: public; Owner: postgres +-- Name: speed_limit_id_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX speed_limit_id_idx ON public.speed_limit USING btree (id); + + +-- +-- Name: 
state_region_crosswalk_region_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX state_region_crosswalk_region_idx ON public.state_region_crosswalk USING btree (region); + + +-- +-- Name: state_region_crosswalk_state_idx; Type: INDEX; Schema: public; Owner: postgres -- -CREATE INDEX speed_limit_speed_limit_id_idx ON public.speed_limit USING btree (speed_limit_id); +CREATE INDEX state_region_crosswalk_state_idx ON public.state_region_crosswalk USING btree (state); + + +-- +-- Name: us_state_abbrev_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX us_state_abbrev_idx ON public.us_state USING btree (abbrev); + + +-- +-- Name: us_state_fips_code_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX us_state_fips_code_idx ON public.us_state USING btree (fips_code); -- @@ -654,47 +679,55 @@ CREATE INDEX speed_limit_speed_limit_id_idx ON public.speed_limit USING btree (s -- ALTER TABLE ONLY public.census - ADD CONSTRAINT census_city_id_fkey FOREIGN KEY (city_id) REFERENCES public.city(city_id) ON DELETE CASCADE; + ADD CONSTRAINT census_city_id_fkey FOREIGN KEY (city_id) REFERENCES public.city(id) ON DELETE CASCADE; -- --- Name: core_services core_services_bna_uuid_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- Name: city city_country_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.city + ADD CONSTRAINT city_country_fkey FOREIGN KEY (country) REFERENCES public.country(name); + + +-- +-- Name: core_services core_services_bna_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres -- ALTER TABLE ONLY public.core_services - ADD CONSTRAINT core_services_bna_uuid_fkey FOREIGN KEY (bna_uuid) REFERENCES public.summary(bna_uuid) ON DELETE CASCADE; + ADD CONSTRAINT core_services_bna_id_fkey FOREIGN KEY (bna_id) REFERENCES public.summary(bna_id) ON DELETE CASCADE; -- --- Name: features features_bna_uuid_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- 
Name: features features_bna_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres -- ALTER TABLE ONLY public.features - ADD CONSTRAINT features_bna_uuid_fkey FOREIGN KEY (bna_uuid) REFERENCES public.summary(bna_uuid) ON DELETE CASCADE; + ADD CONSTRAINT features_bna_id_fkey FOREIGN KEY (bna_id) REFERENCES public.summary(bna_id) ON DELETE CASCADE; -- --- Name: infrastructure infrastructure_bna_uuid_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- Name: infrastructure infrastructure_bna_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres -- ALTER TABLE ONLY public.infrastructure - ADD CONSTRAINT infrastructure_bna_uuid_fkey FOREIGN KEY (bna_uuid) REFERENCES public.summary(bna_uuid) ON DELETE CASCADE; + ADD CONSTRAINT infrastructure_bna_id_fkey FOREIGN KEY (bna_id) REFERENCES public.summary(bna_id) ON DELETE CASCADE; -- --- Name: opportunity opportunity_bna_uuid_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- Name: opportunity opportunity_bna_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres -- ALTER TABLE ONLY public.opportunity - ADD CONSTRAINT opportunity_bna_uuid_fkey FOREIGN KEY (bna_uuid) REFERENCES public.summary(bna_uuid) ON DELETE CASCADE; + ADD CONSTRAINT opportunity_bna_id_fkey FOREIGN KEY (bna_id) REFERENCES public.summary(bna_id) ON DELETE CASCADE; -- --- Name: recreation recreation_bna_uuid_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- Name: recreation recreation_bna_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres -- ALTER TABLE ONLY public.recreation - ADD CONSTRAINT recreation_bna_uuid_fkey FOREIGN KEY (bna_uuid) REFERENCES public.summary(bna_uuid) ON DELETE CASCADE; + ADD CONSTRAINT recreation_bna_id_fkey FOREIGN KEY (bna_id) REFERENCES public.summary(bna_id) ON DELETE CASCADE; -- @@ -702,7 +735,39 @@ ALTER TABLE ONLY public.recreation -- ALTER TABLE ONLY public.speed_limit - ADD CONSTRAINT speed_limit_city_id_fkey FOREIGN KEY (city_id) REFERENCES 
public.city(city_id) ON DELETE CASCADE; + ADD CONSTRAINT speed_limit_city_id_fkey FOREIGN KEY (city_id) REFERENCES public.city(id) ON DELETE CASCADE; + + +-- +-- Name: state_region_crosswalk state_region_crosswalk_region_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.state_region_crosswalk + ADD CONSTRAINT state_region_crosswalk_region_fkey FOREIGN KEY (region) REFERENCES public.bna_region(name); + + +-- +-- Name: state_region_crosswalk state_region_crosswalk_state_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.state_region_crosswalk + ADD CONSTRAINT state_region_crosswalk_state_fkey FOREIGN KEY (state) REFERENCES public.us_state(name); + + +-- +-- Name: submission submission_country_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.submission + ADD CONSTRAINT submission_country_fkey FOREIGN KEY (country) REFERENCES public.country(name); + + +-- +-- Name: submission submission_status_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.submission + ADD CONSTRAINT submission_status_fkey FOREIGN KEY (status) REFERENCES public.approval_status(status); -- @@ -710,7 +775,7 @@ ALTER TABLE ONLY public.speed_limit -- ALTER TABLE ONLY public.summary - ADD CONSTRAINT summary_city_id_fkey FOREIGN KEY (city_id) REFERENCES public.city(city_id) ON DELETE CASCADE; + ADD CONSTRAINT summary_city_id_fkey FOREIGN KEY (city_id) REFERENCES public.city(id) ON DELETE CASCADE; -- diff --git a/docs/database.svg b/docs/database.svg index 93ac5e6..a558139 100644 --- a/docs/database.svg +++ b/docs/database.svg @@ -4,798 +4,799 @@ - - + + dbml approval_status - - - -       approval_status        - - -    Pending     - - -    Approved     - - -    Rejected     - + + + +       approval_status        + + +status +     +character varying + +(!) + + + + +submission + + + +       submission        + + +id +     +integer + +(!) 
+ + +first_name     +character varying + +(!) + + +last_name     +character varying + +(!) + + +occupation     +character varying + + +organization     +character varying + + +email     +character varying + +(!) + + +country     +character varying + +(!) + + +city     +character varying + +(!) + + +region     +character varying + + +fips_code     +character varying + +(!) + + +consent     +boolean + +(!) + + +status     +character varying + +(!) + + +created_at     +timestamp + +(!) + + + + + +approval_status:e->submission:w + + +* +1 bna_region - - - -       bna_region        - - -    Mid-Atlantic     - - -    Midwest     - - -    Mountain     - - -    New England     - - -    Pacific     - - -    South     - + + + +       bna_region        + + +name +     +character varying + +(!) + + + + +state_region_crosswalk + + + +       state_region_crosswalk        + + +state +     +character varying + +(!) + + +region +     +character varying + +(!) + + + + + +bna_region:e->state_region_crosswalk:w + + +* +1 + + + +brokenspoke_pipeline + + + +       brokenspoke_pipeline        + + +state_machine_id +     +uuid + +(!) + + +step     +character varying + + +sqs_message     +json + + +fargate_task_arn     +character varying + + +s3_bucket     +character varying + + +start_time     +timestamp + +(!) + + +end_time     +timestamp + + +torn_down     +boolean + + +results_posted     +boolean + + +cost     +numeric + brokenspoke_status - - - -       brokenspoke_status        - - -    pending     - - -    started     - - -    complete     - + + + +       brokenspoke_status        + + +status +     +character varying + +(!) + brokenspoke_step - - - -       brokenspoke_step        - - -    sqs_message     - - -    setup     - - -    analysis     - - -    cleanup     - - - - -brokenspoke_pipeline - - - -       brokenspoke_pipeline        - - -state_machine_id -     -uuid - -(!) 
- - -step     -brokenspoke_step - - -sqs_message     -json - - -fargate_task_arn     -character varying - - -s3_bucket     -character varying - - -start_time     -timestamp - -(!) - - -end_time     -timestamp - - -torn_down     -boolean - - -results_posted     -boolean - - -cost     -numeric - - - - -brokenspoke_pipeline:e->brokenspoke_step:w - + + + +       brokenspoke_step        + + +step +     +character varying + +(!) + census - - - -       census        - - -census_id -     -integer - -(!) - - -city_id     -uuid - -(!) - - -created_at     -timestamp - -(!) - - -fips_code     -character varying - -(!) - - -pop_size     -integer - -(!) - - -population     -integer - -(!) - + + + +       census        + + +id +     +integer + +(!) + + +city_id     +uuid + +(!) + + +created_at     +timestamp + +(!) + + +fips_code     +character varying + +(!) + + +pop_size     +integer + +(!) + + +population     +integer + +(!) + city - - - -       city        - - -city_id     -uuid - -(!) - - -country -     -character varying - -(!) - - -state -     -character varying - -(!) - - -name -     -character varying - -(!) - - -latitude     -doubleprecision - - -longitude     -doubleprecision - - -region     -character varying - - -state_abbrev     -character varying - - -speed_limit     -integer - - -created_at     -timestamp - -(!) - - -updated_at     -timestamp - + + + +       city        + + +id     +uuid + +(!) + + +country +     +character varying + +(!) + + +state +     +character varying + +(!) + + +name +     +character varying + +(!) + + +latitude     +doubleprecision + + +longitude     +doubleprecision + + +region     +character varying + + +state_abbrev     +character varying + + +speed_limit     +integer + + +created_at     +timestamp + +(!) + + +updated_at     +timestamp + city:e->census:w - - -* -1 + + +* +1 speed_limit - - - -       speed_limit        - - -speed_limit_id -     -integer - -(!) - - -city_id     -uuid - -(!) - - -created_at     -timestamp - -(!) 
- - -residential     -integer - -(!) - + + + +       speed_limit        + + +id +     +integer + +(!) + + +city_id     +uuid + +(!) + + +created_at     +timestamp + +(!) + + +residential     +integer + +(!) + - + city:e->speed_limit:w - - -* -1 + + +* +1 summary - - - -       summary        - - -bna_uuid -     -uuid - -(!) - - -city_id     -uuid - -(!) - - -created_at     -timestamp - -(!) - - -score     -doubleprecision - -(!) - - -version     -character varying - -(!) - + + + +       summary        + + +bna_id +     +uuid + +(!) + + +city_id     +uuid + +(!) + + +created_at     +timestamp + +(!) + + +score     +doubleprecision + +(!) + + +version     +character varying + +(!) + - + city:e->summary:w - - -* -1 + + +* +1 core_services - - - -       core_services        - - -bna_uuid -     -uuid - -(!) - - -dentists     -doubleprecision - - -doctors     -doubleprecision - - -grocery     -doubleprecision - - -hospitals     -doubleprecision - - -pharmacies     -doubleprecision - - -score     -doubleprecision - - -social_services     -doubleprecision - + + + +       core_services        + + +bna_id +     +uuid + +(!) + + +dentists     +doubleprecision + + +doctors     +doubleprecision + + +grocery     +doubleprecision + + +hospitals     +doubleprecision + + +pharmacies     +doubleprecision + + +score     +doubleprecision + + +social_services     +doubleprecision + country - - - -       country        - - -country_id -     -integer - -(!) - - -name     -character varying - -(!) - + + + +       country        + + +name +     +character varying + +(!) + + + + + +country:e->city:w + + +* +1 + + + + +country:e->submission:w + + +* +1 fargate_price - - - -       fargate_price        - - -id -     -integer - -(!) - - -per_second     -numeric - -(!) - - -created_at     -timestamp - -(!) - + + + +       fargate_price        + + +id +     +integer + +(!) + + +per_second     +numeric + +(!) + + +created_at     +timestamp + +(!) 
+ features - - - -       features        - - -bna_uuid -     -uuid - -(!) - - -people     -doubleprecision - - -retail     -doubleprecision - - -transit     -doubleprecision - + + + +       features        + + +bna_id +     +uuid + +(!) + + +people     +doubleprecision + + +retail     +doubleprecision + + +transit     +doubleprecision + infrastructure - - - -       infrastructure        - - -bna_uuid -     -uuid - -(!) - - -low_stress_miles     -doubleprecision - - -high_stress_miles     -doubleprecision - + + + +       infrastructure        + + +bna_id +     +uuid + +(!) + + +low_stress_miles     +doubleprecision + + +high_stress_miles     +doubleprecision + opportunity - - - -       opportunity        - - -bna_uuid -     -uuid - -(!) - - -employment     -doubleprecision - - -higher_education     -doubleprecision - - -k12_education     -doubleprecision - - -score     -doubleprecision - - -technical_vocational_college     -doubleprecision - + + + +       opportunity        + + +bna_id +     +uuid + +(!) + + +employment     +doubleprecision + + +higher_education     +doubleprecision + + +k12_education     +doubleprecision + + +score     +doubleprecision + + +technical_vocational_college     +doubleprecision + recreation - - - -       recreation        - - -bna_uuid -     -uuid - -(!) - - -community_centers     -doubleprecision - - -parks     -doubleprecision - - -recreation_trails     -doubleprecision - - -score     -doubleprecision - + + + +       recreation        + + +bna_id +     +uuid + +(!) + + +community_centers     +doubleprecision + + +parks     +doubleprecision + + +recreation_trails     +doubleprecision + + +score     +doubleprecision + seaql_migrations - - - -       seaql_migrations        - - -version -     -character varying - -(!) - - -applied_at     -bigint - -(!) - - - - -state_region_crosswalk - - - -       state_region_crosswalk        - - -state -     -character varying - -(!) - - -region     -bna_region - -(!) 
- - - - -state_region_crosswalk:e->bna_region:w - - - - -state_speed_limit - - - -       state_speed_limit        - - -state_abbrev -     -character (2) - -(!) - - -state_fips_code     -character (2) - -(!) - - -speed     -integer - -(!) - - -created_at     -timestamp - -(!) - - -updated_at     -timestamp - - - - -submission - - - -       submission        - - -id -     -integer - -(!) - - -first_name     -character varying - -(!) - - -last_name     -character varying - -(!) - - -title     -character varying - - -organization     -character varying - - -email     -character varying - -(!) - - -country     -character varying - -(!) - - -city     -character varying - -(!) - - -region     -character varying - - -fips_code     -character varying - -(!) - - -consent     -boolean - -(!) - - -status     -approval_status - -(!) - - -created_at     -timestamp - -(!) - - - - -submission:e->approval_status:w - + + + +       seaql_migrations        + + +version +     +character varying + +(!) + + +applied_at     +bigint + +(!) + - + summary:e->core_services:w - - -* -1 + + +* +1 - + summary:e->features:w - - -* -1 + + +* +1 - + summary:e->infrastructure:w - - -* -1 + + +* +1 - + summary:e->opportunity:w - - -* -1 + + +* +1 - + summary:e->recreation:w - - -* -1 + + +* +1 + + + +us_state + + + +       us_state        + + +name +     +character varying + +(!) + + +abbrev     +character varying + +(!) + + +fips_code     +character (2) + +(!) + + +speed_limit     +integer + +(!) 
+ + + + + +us_state:e->state_region_crosswalk:w + + +* +1 diff --git a/effortless/src/api.rs b/effortless/src/api.rs index 3e1d9fc..7261244 100644 --- a/effortless/src/api.rs +++ b/effortless/src/api.rs @@ -1,8 +1,8 @@ use crate::{ error::{APIError, APIErrorSource, APIErrors}, - fragment::{self, get_apigw_request_id}, + fragment::{self, get_apigw_request_id, BnaRequestExt}, }; -use lambda_http::{http::StatusCode, Request, RequestPayloadExt}; +use lambda_http::{http::StatusCode, Body, Request, RequestExt, RequestPayloadExt, Response}; use serde::de::DeserializeOwned; use std::{fmt::Display, str::FromStr}; @@ -168,7 +168,7 @@ pub fn invalid_body(event: &Request, details: &str) -> APIError { ) } -/// Create and APIError from and API Gateway event, representing a parameter issue. +/// Create and APIError from and API Gateway event, representing a path parameter issue. pub fn invalid_path_parameter(event: &Request, parameter: &str, details: &str) -> APIError { APIError::with_pointer( get_apigw_request_id(event), @@ -176,3 +176,175 @@ pub fn invalid_path_parameter(event: &Request, parameter: &str, details: &str) - format!("invalid path parameter `{parameter}`: {details}").as_str(), ) } + +/// Create and APIError from and API Gateway event, representing a query parameter issue. +pub fn invalid_query_parameter(event: &Request, parameter: &str, details: &str) -> APIError { + APIError::with_pointer( + get_apigw_request_id(event), + parameter, + format!("invalid query parameter `{parameter}`: {details}").as_str(), + ) +} + +/// Maximum number of items allowed to be returned by a query at once. +pub const MAX_PAGE_SIZE: u64 = 100; +/// Number of items to return per page if no argument was provided. +pub const DEFAULT_PAGE_SIZE: u64 = 50; + +/// The pagination details. +#[derive(Debug)] +pub struct PaginationParameters { + /// The number of items per page. + pub page_size: u64, + /// The result page being returned. 
+ pub page: u64, +} + +impl Default for PaginationParameters { + fn default() -> Self { + Self { + page_size: DEFAULT_PAGE_SIZE, + page: 0, + } + } +} + +// Retrieves the pagination parameters. +/// +/// If nothing is provided, the first page is returned and will contain up to +/// [`DEFAULT_PAGE_SIZE`] items. +/// +/// If `page` does not exist, the lambda functions will return the first page. +pub fn extract_pagination_parameters( + event: &Request, +) -> Result> { + let mut pagination = PaginationParameters::default(); + + // If no query parameters were provided, return the default values. + if !event.has_query_parameters() { + return Ok(pagination); + } + + // Otherwise process the parameters. + let parameter = "page_size"; + if let Some(page_size) = event.query_string_parameters().first(parameter) { + match page_size.parse::() { + Ok(page_size) => { + pagination.page_size = match page_size { + 0..=MAX_PAGE_SIZE => page_size, + _ => MAX_PAGE_SIZE, + } + } + Err(e) => { + let api_error = invalid_query_parameter( + event, + parameter, + format!("failed to process the `{parameter}` parameter: {e}").as_str(), + ); + return Err(APIErrors::new(&[api_error]).into()); + } + } + } + + let parameter = "page"; + if let Some(page) = event.query_string_parameters().first(parameter) { + match page.parse::() { + Ok(page) => pagination.page = page, + Err(e) => { + let api_error = invalid_query_parameter( + event, + parameter, + format!("failed to process the `{parameter}` parameter: {e}").as_str(), + ); + return Err(APIErrors::new(&[api_error]).into()); + } + } + } + + Ok(pagination) +} + +#[cfg(test)] +mod tests { + use super::*; + use lambda_http::{http::StatusCode, request::from_str, RequestExt}; + use std::collections::HashMap; + + #[test] + fn test_pagination_parameters_without_params() { + let input = include_str!("fixtures/api-gateway-v2-proxy-request-minimal.json"); + let req = from_str(input).unwrap(); + + let actual = extract_pagination_parameters(&req).unwrap(); + 
assert_eq!(actual.page_size, DEFAULT_PAGE_SIZE); + assert_eq!(actual.page, 0); + } + + #[test] + fn test_pagination_parameters_with_valid_params() { + const PAGE_SIZE: u64 = 25; + const PAGE: u64 = 8; + + let mut data = HashMap::new(); + data.insert("page_size".into(), vec![PAGE_SIZE.to_string()]); + data.insert("page".into(), vec![PAGE.to_string()]); + + let input = include_str!("fixtures/api-gateway-v2-proxy-request-minimal.json"); + let result = from_str(input).unwrap(); + let req = result.with_query_string_parameters(data); + + let actual = extract_pagination_parameters(&req).unwrap(); + assert_eq!(actual.page_size, PAGE_SIZE); + assert_eq!(actual.page, PAGE); + } + + #[test] + fn test_pagination_parameters_with_invalid_page_size() { + let mut data = HashMap::new(); + data.insert("page_size".into(), vec!["-1".to_string()]); + data.insert("page".into(), vec!["50".to_string()]); + + let input = include_str!("fixtures/api-gateway-v2-proxy-request-minimal.json"); + let result = from_str(input).unwrap(); + let req = result.with_query_string_parameters(data); + + let actual = extract_pagination_parameters(&req).unwrap_err(); + + // Ensure the error had the BAD_REQUEST status. + assert_eq!(actual.status(), StatusCode::BAD_REQUEST); + + // Ensure the error message is correct. 
+ let b = actual.body(); + let message = match b { + Body::Text(message) => message, + _ => panic!("The body does not match the Text invariant."), + }; + let api_error: APIErrors = serde_json::from_str(message).unwrap(); + assert_eq!(api_error.errors.len(), 1) + } + + #[test] + fn test_pagination_parameters_with_invalid_page() { + let mut data = HashMap::new(); + data.insert("page_size".into(), vec!["1".to_string()]); + data.insert("page".into(), vec!["abc".to_string()]); + + let input = include_str!("fixtures/api-gateway-v2-proxy-request-minimal.json"); + let result = from_str(input).unwrap(); + let req = result.with_query_string_parameters(data); + + let actual = extract_pagination_parameters(&req).unwrap_err(); + + // Ensure the error had the BAD_REQUEST status. + assert_eq!(actual.status(), StatusCode::BAD_REQUEST); + + // Ensure the error message is correct. + let b = actual.body(); + let message = match b { + Body::Text(message) => message, + _ => panic!("The body does not match the Text invariant."), + }; + let api_error: APIErrors = serde_json::from_str(message).unwrap(); + assert_eq!(api_error.errors.len(), 1) + } +} diff --git a/effortless/src/fixtures/api-gateway-v2-proxy-request-minimal.json b/effortless/src/fixtures/api-gateway-v2-proxy-request-minimal.json new file mode 100644 index 0000000..956591d --- /dev/null +++ b/effortless/src/fixtures/api-gateway-v2-proxy-request-minimal.json @@ -0,0 +1,35 @@ +{ + "headers": { + "accept": "*/*", + "content-length": "0", + "host": "xxx.execute-api.us-east-1.amazonaws.com", + "user-agent": "curl/7.64.1", + "x-amzn-trace-id": "Root=1-5eb33c07-de25b420912dee103a5db434", + "x-forwarded-for": "65.78.31.245", + "x-forwarded-port": "443", + "x-forwarded-proto": "https" + }, + "isBase64Encoded": false, + "rawPath": "/", + "rawQueryString": "", + "requestContext": { + "accountId": "123456789012", + "apiId": "xxx", + "domainName": "xxx.execute-api.us-east-1.amazonaws.com", + "domainPrefix": "xxx", + "http": { + "method": 
"GET", + "path": "/", + "protocol": "HTTP/1.1", + "sourceIp": "65.78.31.245", + "userAgent": "curl/7.64.1" + }, + "requestId": "MIZRNhJtIAMEMDw=", + "routeKey": "$default", + "stage": "$default", + "time": "06/May/2020:22:36:55 +0000", + "timeEpoch": 1588804615616 + }, + "routeKey": "$default", + "version": "2.0" +} diff --git a/effortless/src/fragment.rs b/effortless/src/fragment.rs index 69a73b7..6793781 100644 --- a/effortless/src/fragment.rs +++ b/effortless/src/fragment.rs @@ -12,6 +12,7 @@ pub enum ParseParameterError { URLEncodingError(#[from] FromUtf8Error), } +/// Parse a parameter found in a QueryMap. fn parse_parameter(qm: &QueryMap, parameter: &str) -> Option> where T: FromStr, @@ -153,6 +154,9 @@ pub trait BnaRequestExt { /// Returns true if there are path parameters available. fn has_path_parameters(&self) -> bool; + + /// Returns true if there are query parameters available. + fn has_query_parameters(&self) -> bool; } impl BnaRequestExt for http::Request { @@ -190,6 +194,10 @@ impl BnaRequestExt for http::Request { fn has_path_parameters(&self) -> bool { !self.path_parameters().is_empty() } + + fn has_query_parameters(&self) -> bool { + !self.query_string_parameters().is_empty() + } } #[cfg(test)] diff --git a/effortless/src/lib.rs b/effortless/src/lib.rs index 8d353b1..7e530b6 100644 --- a/effortless/src/lib.rs +++ b/effortless/src/lib.rs @@ -1,3 +1,4 @@ pub mod api; pub mod error; pub mod fragment; +pub mod response; diff --git a/effortless/src/response.rs b/effortless/src/response.rs new file mode 100644 index 0000000..eb84ace --- /dev/null +++ b/effortless/src/response.rs @@ -0,0 +1,11 @@ +use lambda_http::{ + http::{header, StatusCode}, + Body, Error, Response, +}; + +pub fn make_json_created_response(body: String) -> Result, Error> { + Ok(Response::builder() + .status(StatusCode::CREATED) + .header(header::CONTENT_TYPE, "application/json") + .body(Body::Text(body))?) 
+} diff --git a/entity/src/entities/approval_status.rs b/entity/src/entities/approval_status.rs new file mode 100644 index 0000000..8e30fdb --- /dev/null +++ b/entity/src/entities/approval_status.rs @@ -0,0 +1,25 @@ +//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.0 + +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] +#[sea_orm(table_name = "approval_status")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub status: String, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_many = "super::submission::Entity")] + Submission, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Submission.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/entity/src/entities/bna_region.rs b/entity/src/entities/bna_region.rs new file mode 100644 index 0000000..d1dd24d --- /dev/null +++ b/entity/src/entities/bna_region.rs @@ -0,0 +1,38 @@ +//! 
`SeaORM` Entity, @generated by sea-orm-codegen 1.0.0 + +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] +#[sea_orm(table_name = "bna_region")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub name: String, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_many = "super::state_region_crosswalk::Entity")] + StateRegionCrosswalk, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::StateRegionCrosswalk.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + super::state_region_crosswalk::Relation::UsState.def() + } + fn via() -> Option { + Some( + super::state_region_crosswalk::Relation::BnaRegion + .def() + .rev(), + ) + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/entity/src/entities/brokenspoke_pipeline.rs b/entity/src/entities/brokenspoke_pipeline.rs index 0a07f18..ffaa0c9 100644 --- a/entity/src/entities/brokenspoke_pipeline.rs +++ b/entity/src/entities/brokenspoke_pipeline.rs @@ -1,6 +1,5 @@ //! 
`SeaORM` Entity, @generated by sea-orm-codegen 1.0.0 -use super::sea_orm_active_enums::BrokenspokeStep; use sea_orm::entity::prelude::*; use serde::{Deserialize, Serialize}; @@ -9,7 +8,7 @@ use serde::{Deserialize, Serialize}; pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub state_machine_id: Uuid, - pub step: Option, + pub step: Option, pub sqs_message: Option, pub fargate_task_arn: Option, pub s3_bucket: Option, diff --git a/entity/src/entities/state_speed_limit.rs b/entity/src/entities/brokenspoke_status.rs similarity index 64% rename from entity/src/entities/state_speed_limit.rs rename to entity/src/entities/brokenspoke_status.rs index b6377ec..8f3caee 100644 --- a/entity/src/entities/state_speed_limit.rs +++ b/entity/src/entities/brokenspoke_status.rs @@ -4,14 +4,10 @@ use sea_orm::entity::prelude::*; use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] -#[sea_orm(table_name = "state_speed_limit")] +#[sea_orm(table_name = "brokenspoke_status")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] - pub state_abbrev: String, - pub state_fips_code: String, - pub speed: i32, - pub created_at: TimeDateTimeWithTimeZone, - pub updated_at: Option, + pub status: String, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/entity/src/entities/brokenspoke_step.rs b/entity/src/entities/brokenspoke_step.rs new file mode 100644 index 0000000..ddb37dc --- /dev/null +++ b/entity/src/entities/brokenspoke_step.rs @@ -0,0 +1,16 @@ +//! 
`SeaORM` Entity, @generated by sea-orm-codegen 1.0.0 + +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] +#[sea_orm(table_name = "brokenspoke_step")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub step: String, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/entity/src/entities/city.rs b/entity/src/entities/city.rs index 69a222e..f9645b7 100644 --- a/entity/src/entities/city.rs +++ b/entity/src/entities/city.rs @@ -29,6 +29,14 @@ pub struct Model { pub enum Relation { #[sea_orm(has_many = "super::census::Entity")] Census, + #[sea_orm( + belongs_to = "super::country::Entity", + from = "Column::Country", + to = "super::country::Column::Name", + on_update = "NoAction", + on_delete = "NoAction" + )] + Country, #[sea_orm(has_many = "super::speed_limit::Entity")] SpeedLimit, #[sea_orm(has_many = "super::summary::Entity")] @@ -41,6 +49,12 @@ impl Related for Entity { } } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Country.def() + } +} + impl Related for Entity { fn to() -> RelationDef { Relation::SpeedLimit.def() diff --git a/entity/src/entities/country.rs b/entity/src/entities/country.rs index e9507fd..fb52841 100644 --- a/entity/src/entities/country.rs +++ b/entity/src/entities/country.rs @@ -6,13 +6,28 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "country")] pub struct Model { - #[sea_orm(primary_key)] - pub id: i32, - #[sea_orm(unique)] + #[sea_orm(primary_key, auto_increment = false)] pub name: String, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation {} +pub enum Relation { + #[sea_orm(has_many = "super::city::Entity")] + City, + #[sea_orm(has_many = "super::submission::Entity")] + 
Submission, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::City.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Submission.def() + } +} impl ActiveModelBehavior for ActiveModel {} diff --git a/entity/src/entities/mod.rs b/entity/src/entities/mod.rs index 1289eea..267c55d 100644 --- a/entity/src/entities/mod.rs +++ b/entity/src/entities/mod.rs @@ -2,7 +2,11 @@ pub mod prelude; +pub mod approval_status; +pub mod bna_region; pub mod brokenspoke_pipeline; +pub mod brokenspoke_status; +pub mod brokenspoke_step; pub mod census; pub mod city; pub mod core_services; @@ -12,9 +16,8 @@ pub mod features; pub mod infrastructure; pub mod opportunity; pub mod recreation; -pub mod sea_orm_active_enums; pub mod speed_limit; pub mod state_region_crosswalk; -pub mod state_speed_limit; pub mod submission; pub mod summary; +pub mod us_state; diff --git a/entity/src/entities/prelude.rs b/entity/src/entities/prelude.rs index 1bfedf4..98aec6a 100644 --- a/entity/src/entities/prelude.rs +++ b/entity/src/entities/prelude.rs @@ -1,6 +1,10 @@ //! 
`SeaORM` Entity, @generated by sea-orm-codegen 1.0.0 +pub use super::approval_status::Entity as ApprovalStatus; +pub use super::bna_region::Entity as BnaRegion; pub use super::brokenspoke_pipeline::Entity as BrokenspokePipeline; +pub use super::brokenspoke_status::Entity as BrokenspokeStatus; +pub use super::brokenspoke_step::Entity as BrokenspokeStep; pub use super::census::Entity as Census; pub use super::city::Entity as City; pub use super::core_services::Entity as CoreServices; @@ -12,6 +16,6 @@ pub use super::opportunity::Entity as Opportunity; pub use super::recreation::Entity as Recreation; pub use super::speed_limit::Entity as SpeedLimit; pub use super::state_region_crosswalk::Entity as StateRegionCrosswalk; -pub use super::state_speed_limit::Entity as StateSpeedLimit; pub use super::submission::Entity as Submission; pub use super::summary::Entity as Summary; +pub use super::us_state::Entity as UsState; diff --git a/entity/src/entities/sea_orm_active_enums.rs b/entity/src/entities/sea_orm_active_enums.rs deleted file mode 100644 index 4cabe36..0000000 --- a/entity/src/entities/sea_orm_active_enums.rs +++ /dev/null @@ -1,43 +0,0 @@ -//! 
`SeaORM` Entity, @generated by sea-orm-codegen 1.0.0 - -use sea_orm::entity::prelude::*; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Clone, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] -#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "approval_status")] -pub enum ApprovalStatus { - #[sea_orm(string_value = "Approved")] - Approved, - #[sea_orm(string_value = "Pending")] - Pending, - #[sea_orm(string_value = "Rejected")] - Rejected, -} -#[derive(Debug, Clone, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] -#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "bna_region")] -pub enum BnaRegion { - #[sea_orm(string_value = "Mid-Atlantic")] - MidAtlantic, - #[sea_orm(string_value = "Midwest")] - Midwest, - #[sea_orm(string_value = "Mountain")] - Mountain, - #[sea_orm(string_value = "New England")] - NewEngland, - #[sea_orm(string_value = "Pacific")] - Pacific, - #[sea_orm(string_value = "South")] - South, -} -#[derive(Debug, Clone, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] -#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "brokenspoke_step")] -pub enum BrokenspokeStep { - #[sea_orm(string_value = "analysis")] - Analysis, - #[sea_orm(string_value = "cleanup")] - Cleanup, - #[sea_orm(string_value = "setup")] - Setup, - #[sea_orm(string_value = "sqs_message")] - SqsMessage, -} diff --git a/entity/src/entities/state_region_crosswalk.rs b/entity/src/entities/state_region_crosswalk.rs index 0e8f486..082fba0 100644 --- a/entity/src/entities/state_region_crosswalk.rs +++ b/entity/src/entities/state_region_crosswalk.rs @@ -1,6 +1,5 @@ //! 
`SeaORM` Entity, @generated by sea-orm-codegen 1.0.0 -use super::sea_orm_active_enums::BnaRegion; use sea_orm::entity::prelude::*; use serde::{Deserialize, Serialize}; @@ -9,10 +8,40 @@ use serde::{Deserialize, Serialize}; pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub state: String, - pub region: BnaRegion, + #[sea_orm(primary_key, auto_increment = false)] + pub region: String, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation {} +pub enum Relation { + #[sea_orm( + belongs_to = "super::bna_region::Entity", + from = "Column::Region", + to = "super::bna_region::Column::Name", + on_update = "NoAction", + on_delete = "NoAction" + )] + BnaRegion, + #[sea_orm( + belongs_to = "super::us_state::Entity", + from = "Column::State", + to = "super::us_state::Column::Name", + on_update = "NoAction", + on_delete = "NoAction" + )] + UsState, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::BnaRegion.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::UsState.def() + } +} impl ActiveModelBehavior for ActiveModel {} diff --git a/entity/src/entities/submission.rs b/entity/src/entities/submission.rs index 354d415..f237478 100644 --- a/entity/src/entities/submission.rs +++ b/entity/src/entities/submission.rs @@ -1,6 +1,5 @@ //! 
`SeaORM` Entity, @generated by sea-orm-codegen 1.0.0 -use super::sea_orm_active_enums::ApprovalStatus; use sea_orm::entity::prelude::*; use serde::{Deserialize, Serialize}; @@ -19,11 +18,40 @@ pub struct Model { pub region: Option, pub fips_code: String, pub consent: bool, - pub status: ApprovalStatus, + pub status: String, pub created_at: TimeDateTimeWithTimeZone, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation {} +pub enum Relation { + #[sea_orm( + belongs_to = "super::approval_status::Entity", + from = "Column::Status", + to = "super::approval_status::Column::Status", + on_update = "NoAction", + on_delete = "NoAction" + )] + ApprovalStatus, + #[sea_orm( + belongs_to = "super::country::Entity", + from = "Column::Country", + to = "super::country::Column::Name", + on_update = "NoAction", + on_delete = "NoAction" + )] + Country, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::ApprovalStatus.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Country.def() + } +} impl ActiveModelBehavior for ActiveModel {} diff --git a/entity/src/entities/us_state.rs b/entity/src/entities/us_state.rs new file mode 100644 index 0000000..8a75d36 --- /dev/null +++ b/entity/src/entities/us_state.rs @@ -0,0 +1,39 @@ +//! 
`SeaORM` Entity, @generated by sea-orm-codegen 1.0.0 + +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] +#[sea_orm(table_name = "us_state")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub name: String, + #[sea_orm(unique)] + pub abbrev: String, + #[sea_orm(unique)] + pub fips_code: String, + pub speed_limit: i32, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_many = "super::state_region_crosswalk::Entity")] + StateRegionCrosswalk, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::StateRegionCrosswalk.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + super::state_region_crosswalk::Relation::BnaRegion.def() + } + fn via() -> Option { + Some(super::state_region_crosswalk::Relation::UsState.def().rev()) + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/entity/src/wrappers/brokenspoke_pipeline.rs b/entity/src/wrappers/brokenspoke_pipeline.rs index b6454fb..25c092a 100644 --- a/entity/src/wrappers/brokenspoke_pipeline.rs +++ b/entity/src/wrappers/brokenspoke_pipeline.rs @@ -1,4 +1,4 @@ -use crate::entities::{brokenspoke_pipeline, sea_orm_active_enums}; +use crate::entities::brokenspoke_pipeline; use sea_orm::{ prelude::{Decimal, Json, TimeDateTimeWithTimeZone, Uuid}, ActiveValue, IntoActiveModel, @@ -15,7 +15,7 @@ pub struct BrokenspokePipelinePost { pub sqs_message: Option, pub start_time: TimeDateTimeWithTimeZone, pub state_machine_id: Uuid, - pub step: Option, + pub step: Option, pub torn_down: Option, } @@ -47,7 +47,7 @@ pub struct BrokenspokePipelinePatch { pub scheduled_trigger_id: Option>, pub sqs_message: Option>, pub start_time: Option>, - pub state: Option>, + pub state: Option>, pub torn_down: Option>, } diff --git a/entity/src/wrappers/mod.rs b/entity/src/wrappers/mod.rs index 0897e4a..6d2d33e 100644 --- 
a/entity/src/wrappers/mod.rs +++ b/entity/src/wrappers/mod.rs @@ -4,7 +4,6 @@ pub mod census; pub mod city; pub mod submission; -use crate::entities::sea_orm_active_enums; use serde::{Deserialize, Serialize}; use std::{fmt::Display, str::FromStr}; @@ -15,26 +14,6 @@ pub enum ApprovalStatus { Rejected, } -impl From for ApprovalStatus { - fn from(value: sea_orm_active_enums::ApprovalStatus) -> Self { - match value { - sea_orm_active_enums::ApprovalStatus::Approved => Self::Approved, - sea_orm_active_enums::ApprovalStatus::Rejected => Self::Rejected, - sea_orm_active_enums::ApprovalStatus::Pending => Self::Pending, - } - } -} - -impl From for sea_orm_active_enums::ApprovalStatus { - fn from(val: ApprovalStatus) -> Self { - match val { - ApprovalStatus::Approved => sea_orm_active_enums::ApprovalStatus::Approved, - ApprovalStatus::Rejected => sea_orm_active_enums::ApprovalStatus::Rejected, - ApprovalStatus::Pending => sea_orm_active_enums::ApprovalStatus::Pending, - } - } -} - impl FromStr for ApprovalStatus { type Err = serde_plain::Error; @@ -51,28 +30,6 @@ pub enum BrokenspokeStep { Cleanup, } -impl From for BrokenspokeStep { - fn from(value: sea_orm_active_enums::BrokenspokeStep) -> Self { - match value { - sea_orm_active_enums::BrokenspokeStep::Analysis => Self::Analysis, - sea_orm_active_enums::BrokenspokeStep::Cleanup => Self::Cleanup, - sea_orm_active_enums::BrokenspokeStep::Setup => Self::Setup, - sea_orm_active_enums::BrokenspokeStep::SqsMessage => Self::SqsMessage, - } - } -} - -impl From for sea_orm_active_enums::BrokenspokeStep { - fn from(val: BrokenspokeStep) -> Self { - match val { - BrokenspokeStep::Analysis => sea_orm_active_enums::BrokenspokeStep::Analysis, - BrokenspokeStep::Cleanup => sea_orm_active_enums::BrokenspokeStep::Cleanup, - BrokenspokeStep::Setup => sea_orm_active_enums::BrokenspokeStep::Setup, - BrokenspokeStep::SqsMessage => sea_orm_active_enums::BrokenspokeStep::SqsMessage, - } - } -} - impl FromStr for BrokenspokeStep { type Err = 
serde_plain::Error; @@ -93,32 +50,6 @@ pub enum BnaRegion { South, } -impl From for BnaRegion { - fn from(value: sea_orm_active_enums::BnaRegion) -> Self { - match value { - sea_orm_active_enums::BnaRegion::MidAtlantic => Self::MidAtlantic, - sea_orm_active_enums::BnaRegion::NewEngland => Self::NewEngland, - sea_orm_active_enums::BnaRegion::Midwest => Self::Midwest, - sea_orm_active_enums::BnaRegion::Mountain => Self::Mountain, - sea_orm_active_enums::BnaRegion::Pacific => Self::Pacific, - sea_orm_active_enums::BnaRegion::South => Self::South, - } - } -} - -impl From for sea_orm_active_enums::BnaRegion { - fn from(value: BnaRegion) -> Self { - match value { - BnaRegion::MidAtlantic => sea_orm_active_enums::BnaRegion::MidAtlantic, - BnaRegion::NewEngland => sea_orm_active_enums::BnaRegion::NewEngland, - BnaRegion::Midwest => sea_orm_active_enums::BnaRegion::Midwest, - BnaRegion::Mountain => sea_orm_active_enums::BnaRegion::Mountain, - BnaRegion::Pacific => sea_orm_active_enums::BnaRegion::Pacific, - BnaRegion::South => sea_orm_active_enums::BnaRegion::South, - } - } -} - impl FromStr for BnaRegion { type Err = serde_plain::Error; diff --git a/entity/src/wrappers/submission.rs b/entity/src/wrappers/submission.rs index eddc73f..e53869b 100644 --- a/entity/src/wrappers/submission.rs +++ b/entity/src/wrappers/submission.rs @@ -1,4 +1,4 @@ -use crate::entities::{sea_orm_active_enums, submission}; +use crate::entities::submission; use sea_orm::{ActiveValue, IntoActiveModel}; use serde::{Deserialize, Serialize}; @@ -14,7 +14,7 @@ pub struct SubmissionPost { pub region: Option, pub fips_code: String, pub consent: bool, - pub status: Option, + pub status: Option, } impl IntoActiveModel for SubmissionPost { @@ -49,7 +49,7 @@ pub struct SubmissionPatch { pub region: Option>, pub fips_code: Option, pub consent: Option, - pub status: Option, + pub status: Option, } impl IntoActiveModel for SubmissionPatch { @@ -139,7 +139,7 @@ mod tests { let region = None; let fips_code = 
"0123456".to_string(); let consent = true; - let status = Some(sea_orm_active_enums::ApprovalStatus::Approved); + let status = Some("Approved".to_string()); let wrapper = SubmissionPost { first_name: first_name.clone(), last_name: last_name.clone(), @@ -166,7 +166,7 @@ mod tests { region: ActiveValue::Set(region), fips_code: ActiveValue::Set(fips_code), consent: ActiveValue::Set(consent), - status: ActiveValue::Set(sea_orm_active_enums::ApprovalStatus::Approved), + status: ActiveValue::Set("Approved".to_string()), created_at: ActiveValue::NotSet, }; assert_eq!(active_model, expected); diff --git a/examples/seeder.rs b/examples/seeder.rs index 7419d07..7c91421 100644 --- a/examples/seeder.rs +++ b/examples/seeder.rs @@ -8,21 +8,15 @@ use csv::Reader; use dotenv::dotenv; use entity::{ census, city, core_services, features, infrastructure, opportunity, prelude::*, recreation, - speed_limit, state_region_crosswalk, state_speed_limit, summary, + speed_limit, summary, }; use sea_orm::{prelude::Uuid, ActiveValue, Database, EntityTrait}; use serde::Deserialize; -use std::{collections::HashMap, str::FromStr}; +use std::collections::{HashMap, HashSet}; -const US_STATE_COUNT: usize = 50; +const US_STATE_COUNT: usize = 51; // w/ Puerto Rico const CHUNK_SIZE: usize = 1000; - -#[derive(Debug, Deserialize)] -pub struct StateSpeedLimitCSV { - state: String, - fips_code_state: String, - speed: u32, -} +const BNA_COUNTRY_COUNT: usize = 15; #[derive(Debug, Deserialize)] pub struct CitySpeedLimitCSV { @@ -30,12 +24,6 @@ pub struct CitySpeedLimitCSV { speed: u32, } -#[derive(Debug, Deserialize)] -pub struct StateRegionCrosswalkCSV { - state_full: String, - region: String, -} - #[tokio::main] async fn main() -> Result<(), Report> { dotenv().ok(); @@ -51,29 +39,24 @@ async fn main() -> Result<(), Report> { let mut bna_opportunity: Vec = Vec::new(); let mut bna_infrastructure: Vec = Vec::new(); let mut versions: HashMap = HashMap::new(); - let mut state_speed_limits: Vec = - 
Vec::with_capacity(US_STATE_COUNT); - let mut state_region_crosswalks: Vec = - Vec::with_capacity(US_STATE_COUNT); let mut city_fips2limit: HashMap = HashMap::new(); - let mut state_bnaregion: HashMap = HashMap::new(); - // Load the state speed limit file. - let mut state_speed_limit_csv_reader = - Reader::from_path("examples/seeder-state_fips_speed.csv")?; - for record in state_speed_limit_csv_reader.deserialize() { - // Read the record. - let speed_limit: StateSpeedLimitCSV = record?; + // Set the database connection. + let database_url = dotenv::var("DATABASE_URL")?; + let db = Database::connect(database_url).await?; - // Populate the model. - let state_speed_limit_model = state_speed_limit::ActiveModel { - state_abbrev: ActiveValue::Set(speed_limit.state), - state_fips_code: ActiveValue::Set(speed_limit.fips_code_state), - speed: ActiveValue::Set(speed_limit.speed.try_into()?), - created_at: ActiveValue::NotSet, - updated_at: ActiveValue::NotSet, - }; - state_speed_limits.push(state_speed_limit_model); + // Load the US States Region Crosswalk. + let state_region_models = StateRegionCrosswalk::find().all(&db).await?; + let mut state_regions: HashMap = HashMap::with_capacity(US_STATE_COUNT); + for state_region in state_region_models { + state_regions.insert(state_region.state, state_region.region); + } + + // Load the available countries. + let country_models = Country::find().all(&db).await?; + let mut countries: HashSet = HashSet::with_capacity(BNA_COUNTRY_COUNT); + for country in country_models { + countries.insert(country.name); } // Load the city speed limit file. @@ -85,23 +68,6 @@ async fn main() -> Result<(), Report> { city_fips2limit.insert(speed_limit.fips_code_city, speed_limit.speed); } - // Load the state/region crosswalk CSV file. - let mut state_region_crosswalk_csv_reader = - Reader::from_path("examples/seeder-state_region_crosswalk-v24.05.csv")?; - for record in state_region_crosswalk_csv_reader.deserialize() { - // Read the record. 
- let state_region: StateRegionCrosswalkCSV = record?; - state_bnaregion.insert(state_region.state_full.clone(), state_region.region.clone()); - - // Populate the state region crosswalk model. - let r = entity::wrappers::BnaRegion::from_str(&state_region.region).unwrap(); - let state_region_crosswalk_model = state_region_crosswalk::ActiveModel { - state: ActiveValue::Set(state_region.state_full), - region: ActiveValue::Set(r.into()), - }; - state_region_crosswalks.push(state_region_crosswalk_model); - } - // Load the historical data CSV file. let mut csv_reader = Reader::from_path("../../PeopleForBikes/brokenspoke/assets/city-ratings/city-ratings-all-historical-results-v24.07.csv")?; for record in csv_reader.deserialize() { @@ -136,14 +102,19 @@ async fn main() -> Result<(), Report> { Some(fips) => city_fips2limit.get(&fips).map(|x| *x as i32), None => None, }; - let bna_region = state_bnaregion + let bna_region = state_regions .get(&scorecard.state_full) .map(|s| s.to_owned()) .unwrap_or(scorecard.country.clone()); + let err_msg = format!("cannot find country {}", scorecard.country.clone()); + let country = countries + .get(&scorecard.country.clone()) + .expect(err_msg.as_str()) + .to_string(); let city_model = city::ActiveModel { id: ActiveValue::Set(city_uuid), - country: ActiveValue::Set(scorecard.country.clone()), + country: ActiveValue::Set(country), latitude: ActiveValue::Set(Some(scorecard.census_latitude)), longitude: ActiveValue::Set(Some(scorecard.census_longitude)), name: ActiveValue::Set(scorecard.city.clone()), @@ -250,14 +221,7 @@ async fn main() -> Result<(), Report> { bna_infrastructure.push(infratructure_model); } - // Set the database connection. - let database_url = dotenv::var("DATABASE_URL")?; - let db = Database::connect(database_url).await?; - // Insert the entries. 
- StateSpeedLimit::insert_many(state_speed_limits) - .exec(&db) - .await?; City::insert_many(cities.into_values()).exec(&db).await?; Census::insert_many(census_populations).exec(&db).await?; SpeedLimit::insert_many(speed_limits).exec(&db).await?; diff --git a/lambdas/src/bnas/get-bnas-analysis.rs b/lambdas/src/bnas/get-bnas-analysis.rs index ea565e5..ed6b714 100644 --- a/lambdas/src/bnas/get-bnas-analysis.rs +++ b/lambdas/src/bnas/get-bnas-analysis.rs @@ -1,14 +1,11 @@ use dotenv::dotenv; use effortless::{ - api::{entry_not_found, internal_error}, + api::{entry_not_found, extract_pagination_parameters, internal_error}, fragment::BnaRequestExt, }; use entity::prelude::*; use lambda_http::{run, service_fn, Body, Error, IntoResponse, Request, Response}; -use lambdas::{ - api_database_connect, bnas::extract_path_parameters, build_paginated_response, - pagination_parameters, -}; +use lambdas::{api_database_connect, bnas::extract_path_parameters, build_paginated_response}; use sea_orm::{EntityTrait, PaginatorTrait}; use serde_json::json; use tracing::{debug, info}; @@ -21,44 +18,49 @@ async fn function_handler(event: Request) -> Result, Error> { Ok(db) => db, Err(e) => return Ok(e), }; - - if !event.has_path_parameters() { - // Retrieve pagination parameters if any. - let (page_size, page) = match pagination_parameters(&event) { - Ok((page_size, page)) => (page_size, page), - Err(e) => return Ok(e), + if event.has_path_parameters() { + let params = match extract_path_parameters(&event) { + Ok(p) => p, + Err(e) => return Ok(e.into()), }; - // - let select = BrokenspokePipeline::find(); - let query = select - .clone() - .paginate(&db, page_size) - .fetch_page(page - 1) - .await; - let res: Response = match query { - Ok(models) => { - let total_items = select.count(&db).await?; - build_paginated_response(json!(models), total_items, page, page_size, &event)? - } - Err(e) => internal_error(&event, e.to_string().as_str()).into(), + // Retrieve a specific entry. 
+ debug!("Processing the requests..."); + let model = BrokenspokePipeline::find_by_id(params.bna_id) + .one(&db) + .await?; + let res: Response = match model { + Some(model) => json!(model).into_response().await, + None => entry_not_found(&event).into(), }; return Ok(res); } - let params = match extract_path_parameters(&event) { + // Retrieve pagination parameters if any. + let pagination = match extract_pagination_parameters(&event) { Ok(p) => p, - Err(e) => return Ok(e.into()), + Err(e) => return Ok(e), }; - // Retrieve a specific entry. - debug!("Processing the requests..."); - let model = BrokenspokePipeline::find_by_id(params.bna_id) - .one(&db) - .await?; - let res: Response = match model { - Some(model) => json!(model).into_response().await, - None => entry_not_found(&event).into(), + // + let select = BrokenspokePipeline::find(); + let query = select + .clone() + .paginate(&db, pagination.page_size) + .fetch_page(pagination.page) + .await; + let res: Response = match query { + Ok(models) => { + let total_items = select.count(&db).await?; + build_paginated_response( + json!(models), + total_items, + pagination.page, + pagination.page_size, + &event, + )? 
+ } + Err(e) => internal_error(&event, e.to_string().as_str()).into(), }; Ok(res) } diff --git a/lambdas/src/bnas/get-bnas.rs b/lambdas/src/bnas/get-bnas.rs index 9c2ba33..8b33f7a 100644 --- a/lambdas/src/bnas/get-bnas.rs +++ b/lambdas/src/bnas/get-bnas.rs @@ -1,5 +1,8 @@ use dotenv::dotenv; -use effortless::{api::entry_not_found, fragment::BnaRequestExt}; +use effortless::{ + api::{entry_not_found, extract_pagination_parameters}, + fragment::BnaRequestExt, +}; use entity::{core_services, features, infrastructure, opportunity, prelude::*, recreation}; use lambda_http::{run, service_fn, Body, Error, IntoResponse, Request, Response}; use lambdas::{ @@ -8,7 +11,7 @@ use lambdas::{ extract_path_parameters, extract_query_parameters, BNAComponent, BNAPathParameters, BNAQueryParameters, }, - build_paginated_response, pagination_parameters, + build_paginated_response, }; use sea_orm::{ prelude::Uuid, EntityTrait, FromQueryResult, JoinType, PaginatorTrait, QuerySelect, @@ -21,7 +24,7 @@ use tracing::{debug, info}; #[derive(FromQueryResult, Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct BNA { // BNA Summary - pub bna_uuid: Uuid, + pub bna_id: Uuid, pub city_id: Uuid, pub score: f64, pub version: String, @@ -213,8 +216,8 @@ async fn function_handler(event: Request) -> Result, Error> { } // Retrieve pagination parameters if any. 
- let (page_size, page) = match pagination_parameters(&event) { - Ok((page_size, page)) => (page_size, page), + let pagination = match extract_pagination_parameters(&event) { + Ok(p) => p, Err(e) => return Ok(e), }; @@ -222,11 +225,17 @@ async fn function_handler(event: Request) -> Result, Error> { let select = Summary::find(); let body = select .clone() - .paginate(&db, page_size) - .fetch_page(page - 1) + .paginate(&db, pagination.page_size) + .fetch_page(pagination.page - 1) .await?; let total_items = select.count(&db).await?; - build_paginated_response(json!(body), total_items, page, page_size, &event) + build_paginated_response( + json!(body), + total_items, + pagination.page, + pagination.page_size, + &event, + ) } #[tokio::main] @@ -245,49 +254,49 @@ async fn main() -> Result<(), Error> { }) } -// #[cfg(test)] -// mod tests { +#[cfg(test)] +mod tests { -// use super::*; -// use aws_lambda_events::http; -// use lambda_http::RequestExt; -// use std::collections::HashMap; + use super::*; + use aws_lambda_events::http; + use lambda_http::RequestExt; + use std::collections::HashMap; -// #[tokio::test] -// async fn test_handler_all() { -// let event = http::Request::builder() -// .header(http::header::CONTENT_TYPE, "application/json") -// .body(Body::Empty) -// .expect("failed to build request") -// .with_path_parameters(HashMap::from([( -// "id".to_string(), -// "837082b8-c8a0-469e-b310-c868d7f140a2".to_string(), // Santa Monica, CA -// )])) -// .with_request_context(lambda_http::request::RequestContext::ApiGatewayV2( -// lambda_http::aws_lambda_events::apigw::ApiGatewayV2httpRequestContext::default(), -// )); -// let r = function_handler(event).await.unwrap(); -// dbg!(r); -// } + #[tokio::test] + async fn test_handler_all() { + let event = http::Request::builder() + .header(http::header::CONTENT_TYPE, "application/json") + .body(Body::Empty) + .expect("failed to build request") + .with_path_parameters(HashMap::from([( + "bna_id".to_string(), + 
"837082b8-c8a0-469e-b310-c868d7f140a2".to_string(), // Santa Monica, CA + )])) + .with_request_context(lambda_http::request::RequestContext::ApiGatewayV2( + lambda_http::aws_lambda_events::apigw::ApiGatewayV2httpRequestContext::default(), + )); + let r = function_handler(event).await.unwrap(); + dbg!(r); + } -// #[tokio::test] -// async fn test_handler_opportunity() { -// let event = http::Request::builder() -// .header(http::header::CONTENT_TYPE, "application/json") -// .body(Body::Empty) -// .expect("failed to build request") -// .with_path_parameters(HashMap::from([( -// "id".to_string(), -// "837082b8-c8a0-469e-b310-c868d7f140a2".to_string(), // Santa Monica, CA -// )])) -// .with_query_string_parameters(HashMap::from([( -// "component".to_string(), -// "Opportunity".to_string(), -// )])) -// .with_request_context(lambda_http::request::RequestContext::ApiGatewayV2( -// lambda_http::aws_lambda_events::apigw::ApiGatewayV2httpRequestContext::default(), -// )); -// let r = function_handler(event).await.unwrap(); -// dbg!(r); -// } -// } + // #[tokio::test] + // async fn test_handler_opportunity() { + // let event = http::Request::builder() + // .header(http::header::CONTENT_TYPE, "application/json") + // .body(Body::Empty) + // .expect("failed to build request") + // .with_path_parameters(HashMap::from([( + // "id".to_string(), + // "837082b8-c8a0-469e-b310-c868d7f140a2".to_string(), // Santa Monica, CA + // )])) + // .with_query_string_parameters(HashMap::from([( + // "component".to_string(), + // "Opportunity".to_string(), + // )])) + // .with_request_context(lambda_http::request::RequestContext::ApiGatewayV2( + // lambda_http::aws_lambda_events::apigw::ApiGatewayV2httpRequestContext::default(), + // )); + // let r = function_handler(event).await.unwrap(); + // dbg!(r); + // } +} diff --git a/lambdas/src/bnas/post-bnas.rs b/lambdas/src/bnas/post-bnas.rs index 5039d9b..789003a 100644 --- a/lambdas/src/bnas/post-bnas.rs +++ b/lambdas/src/bnas/post-bnas.rs @@ -1,13 
+1,10 @@ use dotenv::dotenv; -use effortless::api::parse_request_body; +use effortless::{api::parse_request_body, response::make_json_created_response}; use entity::{ core_services, features, infrastructure, opportunity, recreation, summary, wrappers::bna::BNAPost, }; -use lambda_http::{ - http::{header, StatusCode}, - run, service_fn, Body, Error, Request, Response, -}; +use lambda_http::{run, service_fn, Body, Error, Request, Response}; use lambdas::database_connect; use sea_orm::{ActiveModelTrait, ActiveValue}; use serde_json::json; @@ -119,11 +116,8 @@ async fn function_handler(event: Request) -> Result, Error> { recreation_res, ); info!("{:?}", res); - let response = Response::builder() - .status(StatusCode::CREATED) - .header(header::CONTENT_TYPE, "application/json") - .body(Body::Text(json!(res).to_string())) - .expect("unable to build http::Response"); + let response = + make_json_created_response(json!(res).to_string()).expect("unable to build http::Response"); Ok(response) // Ok(Body::Empty.into_response().await) diff --git a/lambdas/src/cities/get-cities-bnas.rs b/lambdas/src/cities/get-cities-bnas.rs index 126b93b..7f06cff 100644 --- a/lambdas/src/cities/get-cities-bnas.rs +++ b/lambdas/src/cities/get-cities-bnas.rs @@ -1,11 +1,11 @@ use dotenv::dotenv; -use effortless::api::entry_not_found; +use effortless::api::{entry_not_found, extract_pagination_parameters}; use entity::{city, summary}; use lambda_http::{run, service_fn, Body, Error, Request, Response}; use lambdas::{ build_paginated_response, cities::{extract_path_parameters, CitiesPathParameters}, - database_connect, pagination_parameters, + database_connect, }; use sea_orm::{EntityTrait, PaginatorTrait}; use serde_json::json; @@ -21,8 +21,8 @@ async fn function_handler(event: Request) -> Result, Error> { }; // Retrieve pagination parameters if any. 
- let (page_size, page) = match pagination_parameters(&event) { - Ok((page_size, page)) => (page_size, page), + let pagination = match extract_pagination_parameters(&event) { + Ok(p) => p, Err(e) => return Ok(e), }; @@ -34,14 +34,20 @@ async fn function_handler(event: Request) -> Result, Error> { .find_also_related(summary::Entity); let model = select .clone() - .paginate(&db, page_size) - .fetch_page(page - 1) + .paginate(&db, pagination.page_size) + .fetch_page(pagination.page) .await?; if model.is_empty() { return Ok(entry_not_found(&event).into()); } let total_items = select.count(&db).await?; - build_paginated_response(json!(model), total_items, page, page_size, &event) + build_paginated_response( + json!(model), + total_items, + pagination.page, + pagination.page_size, + &event, + ) } #[tokio::main] diff --git a/lambdas/src/cities/get-cities-census.rs b/lambdas/src/cities/get-cities-census.rs index 621a112..ad362c0 100644 --- a/lambdas/src/cities/get-cities-census.rs +++ b/lambdas/src/cities/get-cities-census.rs @@ -1,11 +1,11 @@ use dotenv::dotenv; -use effortless::api::entry_not_found; +use effortless::api::{entry_not_found, extract_pagination_parameters}; use entity::{census, city}; use lambda_http::{run, service_fn, Body, Error, Request, Response}; use lambdas::{ build_paginated_response, cities::{extract_path_parameters, CitiesPathParameters}, - database_connect, pagination_parameters, + database_connect, }; use sea_orm::{EntityTrait, PaginatorTrait}; use serde_json::json; @@ -37,8 +37,8 @@ async fn function_handler(event: Request) -> Result, Error> { }; // Retrieve pagination parameters if any. 
- let (page_size, page) = match pagination_parameters(&event) { - Ok((page_size, page)) => (page_size, page), + let pagination = match extract_pagination_parameters(&event) { + Ok(p) => p, Err(e) => return Ok(e), }; @@ -50,14 +50,20 @@ async fn function_handler(event: Request) -> Result, Error> { .find_also_related(census::Entity); let model = select .clone() - .paginate(&db, page_size) - .fetch_page(page - 1) + .paginate(&db, pagination.page_size) + .fetch_page(pagination.page - 1) .await?; if model.is_empty() { return Ok(entry_not_found(&event).into()); } let total_items = select.count(&db).await?; - build_paginated_response(json!(model), total_items, page, page_size, &event) + build_paginated_response( + json!(model), + total_items, + pagination.page, + pagination.page_size, + &event, + ) } // #[cfg(test)] diff --git a/lambdas/src/cities/get-cities-submissions.rs b/lambdas/src/cities/get-cities-submissions.rs index 7956d09..b96bea3 100644 --- a/lambdas/src/cities/get-cities-submissions.rs +++ b/lambdas/src/cities/get-cities-submissions.rs @@ -1,12 +1,15 @@ use dotenv::dotenv; use effortless::{ - api::{entry_not_found, parse_path_parameter, parse_query_string_parameter}, + api::{ + entry_not_found, extract_pagination_parameters, parse_path_parameter, + parse_query_string_parameter, + }, error::{APIError, APIErrors}, fragment::get_apigw_request_id, }; -use entity::{prelude::*, wrappers}; +use entity::prelude::*; use lambda_http::{run, service_fn, Body, Error, IntoResponse, Request, Response}; -use lambdas::{api_database_connect, build_paginated_response, pagination_parameters}; +use lambdas::{api_database_connect, build_paginated_response}; use sea_orm::{ColumnTrait, Condition, EntityTrait, PaginatorTrait, QueryFilter}; use serde_json::json; use tracing::{debug, info}; @@ -21,8 +24,8 @@ async fn function_handler(event: Request) -> Result, Error> { }; // Retrieve pagination parameters if any. 
- let (page_size, page) = match pagination_parameters(&event) { - Ok((page_size, page)) => (page_size, page), + let pagination = match extract_pagination_parameters(&event) { + Ok(p) => p, Err(e) => return Ok(e), }; @@ -31,11 +34,10 @@ async fn function_handler(event: Request) -> Result, Error> { // Retrieve the status parameter if available. let query_param_key = "status"; - match parse_query_string_parameter::(&event, query_param_key) { + match parse_query_string_parameter::(&event, query_param_key) { Ok(status) => { if let Some(status) = status { - let s: entity::sea_orm_active_enums::ApprovalStatus = status.into(); - conditions = conditions.add(entity::submission::Column::Status.eq(s)) + conditions = conditions.add(entity::submission::Column::Status.eq(status)) } } Err(e) => return Ok(e.into()), @@ -65,13 +67,19 @@ async fn function_handler(event: Request) -> Result, Error> { let select = Submission::find().filter(conditions); let query = select .clone() - .paginate(&db, page_size) - .fetch_page(page - 1) + .paginate(&db, pagination.page_size) + .fetch_page(pagination.page) .await; let res: Response = match query { Ok(models) => { let total_items = select.count(&db).await?; - build_paginated_response(json!(models), total_items, page, page_size, &event)? + build_paginated_response( + json!(models), + total_items, + pagination.page, + pagination.page_size, + &event, + )? 
} Err(e) => { let api_error = APIError::with_pointer( diff --git a/lambdas/src/cities/get-cities.rs b/lambdas/src/cities/get-cities.rs index 990d9a7..e72febb 100644 --- a/lambdas/src/cities/get-cities.rs +++ b/lambdas/src/cities/get-cities.rs @@ -1,11 +1,14 @@ use dotenv::dotenv; -use effortless::{api::entry_not_found, fragment::BnaRequestExt}; +use effortless::{ + api::{entry_not_found, extract_pagination_parameters}, + fragment::BnaRequestExt, +}; use entity::city; use lambda_http::{run, service_fn, Body, Error, IntoResponse, Request, Response}; use lambdas::{ build_paginated_response, cities::{extract_path_parameters, CitiesPathParameters}, - database_connect, pagination_parameters_2, + database_connect, }; use sea_orm::{EntityTrait, PaginatorTrait}; use serde_json::json; @@ -35,7 +38,7 @@ async fn function_handler(event: Request) -> Result, Error> { } // Retrieve pagination parameters if any. - let pagination = match pagination_parameters_2(&event) { + let pagination = match extract_pagination_parameters(&event) { Ok(p) => p, Err(e) => return Ok(e), }; @@ -45,7 +48,7 @@ async fn function_handler(event: Request) -> Result, Error> { let body = select .clone() .paginate(&db, pagination.page_size) - .fetch_page(pagination.page - 1) + .fetch_page(pagination.page) .await?; let total_items = select.count(&db).await?; build_paginated_response( diff --git a/lambdas/src/cities/post-cities.rs b/lambdas/src/cities/post-cities.rs index c609af7..2f93e88 100644 --- a/lambdas/src/cities/post-cities.rs +++ b/lambdas/src/cities/post-cities.rs @@ -1,17 +1,14 @@ use dotenv::dotenv; -use effortless::api::{invalid_body, parse_request_body}; -use entity::{ - country, - prelude::*, - wrappers::{self, city::CityPost}, -}; -use lambda_http::{ - http::{header, StatusCode}, - run, service_fn, Body, Error, Request, Response, +use effortless::{ + api::{invalid_body, parse_request_body}, + response::make_json_created_response, }; +use entity::{country, prelude::*, state_region_crosswalk, 
wrappers::city::CityPost}; +use lambda_http::{run, service_fn, Body, Error, Request, Response}; use lambdas::database_connect; use sea_orm::{ - ActiveModelTrait, ActiveValue, ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, + ActiveModelTrait, ActiveValue, ColumnTrait, DatabaseConnection, DbErr, EntityTrait, + IntoActiveModel, QueryFilter, }; use serde_json::json; use tracing::info; @@ -57,12 +54,7 @@ async fn function_handler(event: Request) -> Result, Error> { let db = database_connect(Some("DATABASE_URL_SECRET_ID")).await?; // Ensure the country is a valid one. - if Country::find() - .filter(country::Column::Name.eq(&country)) - .one(&db) - .await? - .is_none() - { + if !find_country(&db, &country).await? { return Ok(invalid_body( &event, "the country `{country}` is not in the list of countries supported by the BNA", @@ -72,13 +64,13 @@ async fn function_handler(event: Request) -> Result, Error> { // If the country is the United States, set the region to the standardized state abbreviation. if country.to_lowercase().eq("united states") { - match StateRegionCrosswalk::find_by_id(state_full) + match StateRegionCrosswalk::find() + .filter(state_region_crosswalk::Column::State.eq(state_full)) .one(&db) .await? { Some(model) => { - let region: wrappers::BnaRegion = model.region.into(); - active_city.region = ActiveValue::Set(Some(region.to_string())); + active_city.region = ActiveValue::Set(Some(model.region)); } None => return Ok(invalid_body(&event, "invalid state: {state_full}").into()), } @@ -92,14 +84,29 @@ async fn function_handler(event: Request) -> Result, Error> { // And insert a new entry. 
info!("inserting City into database: {:?}", active_city); let city = active_city.insert(&db).await?; - let response = Response::builder() - .status(StatusCode::CREATED) - .header(header::CONTENT_TYPE, "application/json") - .body(Body::Text(json!(city).to_string())) + let response = make_json_created_response(json!(city).to_string()) .expect("unable to build http::Response"); Ok(response) } +async fn find_country(db: &DatabaseConnection, country: &str) -> Result { + Ok(Country::find() + .filter(country::Column::Name.eq(country)) + .one(db) + .await? + .is_none()) +} + +// async fn find_us_state_abrev( +// db: &DatabaseConnection, +// state_full: &str, +// ) -> Result, DbErr> { +// StateRegionCrosswalk::find_by_id(state_full) +// .one(&db) +// .await? +// .map(|m| Some(m.region.to_string())) +// } + #[cfg(test)] mod tests { // use super::*; diff --git a/lambdas/src/lib.rs b/lambdas/src/lib.rs index c7ab4ca..096c34a 100644 --- a/lambdas/src/lib.rs +++ b/lambdas/src/lib.rs @@ -4,10 +4,11 @@ pub mod link_header; use bnacore::aws::get_aws_secrets_value; use effortless::{ + api::DEFAULT_PAGE_SIZE, error::{APIError, APIErrors}, - fragment::{get_apigw_request_id, BnaRequestExt}, + fragment::BnaRequestExt, }; -use lambda_http::{Body, Error, Request, RequestExt, Response}; +use lambda_http::{Body, Error, Request, Response}; use sea_orm::{Database, DatabaseConnection, DbErr}; use serde_json::Value; use std::env; @@ -16,10 +17,10 @@ use tracing::{debug, error}; /// The result type to return to the caller of the Lambda API handler. pub type APIResult = std::result::Result>; -/// Maximum number of items allowed to be returned by a query at once. -pub const MAX_PAGE_SIZE: u64 = 100; -/// Number of items to return per page if no argument was provided. -pub const DEFAULT_PAGE_SIZE: u64 = 50; +// /// Maximum number of items allowed to be returned by a query at once. +// pub const MAX_PAGE_SIZE: u64 = 100; +// /// Number of items to return per page if no argument was provided. 
+// pub const DEFAULT_PAGE_SIZE: u64 = 50; /// Returns the database connection. /// @@ -42,69 +43,69 @@ pub async fn database_connect(secret_id: Option<&str>) -> Result APIResult<(u64, u64)> { - debug!("Retrieving pagination..."); - let apigw_request_id = get_apigw_request_id(event); - let page_size = match event - .query_string_parameters() - .first("page_size") - .unwrap_or(DEFAULT_PAGE_SIZE.to_string().as_str()) - .parse::() - { - Ok(page_size) => match page_size { - 0 => 1, - 1..=MAX_PAGE_SIZE => page_size, - _ => MAX_PAGE_SIZE, - }, - Err(e) => { - let api_error = APIError::with_parameter( - apigw_request_id, - "page_size", - format!("Failed to process the page_size parameter: {e}").as_str(), - ); - return Err(APIErrors::new(&[api_error]).into()); - } - }; - let page = match event - .query_string_parameters() - .first("page") - .unwrap_or("1") - .parse::() - { - Ok(page) => match page { - 0 => 1, - _ => page, - }, - Err(e) => { - let api_error = APIError::with_parameter( - apigw_request_id, - "page", - format!("Failed to process the page parameter: {e}").as_str(), - ); - return Err(APIErrors::new(&[api_error]).into()); - } - }; - - Ok((page_size, page)) -} +// /// Retrieves the pagination parameters. +// /// +// /// If nothing is provided, the first page is returned and will contain up to +// /// [`DEFAULT_PAGE_SIZE`] items. +// /// +// /// If `page` does not exist, the lambda functions will return an empty array. 
+// pub fn pagination_parameters(event: &Request) -> APIResult<(u64, u64)> { +// debug!("Retrieving pagination..."); +// let apigw_request_id = get_apigw_request_id(event); +// let page_size = match event +// .query_string_parameters() +// .first("page_size") +// .unwrap_or(DEFAULT_PAGE_SIZE.to_string().as_str()) +// .parse::() +// { +// Ok(page_size) => match page_size { +// 0 => 1, +// 1..=MAX_PAGE_SIZE => page_size, +// _ => MAX_PAGE_SIZE, +// }, +// Err(e) => { +// let api_error = APIError::with_parameter( +// apigw_request_id, +// "page_size", +// format!("Failed to process the page_size parameter: {e}").as_str(), +// ); +// return Err(APIErrors::new(&[api_error]).into()); +// } +// }; +// let page = match event +// .query_string_parameters() +// .first("page") +// .unwrap_or("1") +// .parse::() +// { +// Ok(page) => match page { +// 0 => 1, +// _ => page, +// }, +// Err(e) => { +// let api_error = APIError::with_parameter( +// apigw_request_id, +// "page", +// format!("Failed to process the page parameter: {e}").as_str(), +// ); +// return Err(APIErrors::new(&[api_error]).into()); +// } +// }; + +// Ok((page_size, page)) +// } /// Represent the query parameters related to the pagination. -pub struct PaginationParameters { - /// The number of items per page. - pub page_size: u64, - /// The result page being returned. - pub page: u64, -} +// pub struct PaginationParameters { +// /// The number of items per page. +// pub page_size: u64, +// /// The result page being returned. +// pub page: u64, +// } -pub fn pagination_parameters_2(event: &Request) -> Result> { - pagination_parameters(event).map(|(page_size, page)| PaginationParameters { page_size, page }) -} +// pub fn pagination_parameters_2(event: &Request) -> Result> { +// pagination_parameters(event).map(|(page_size, page)| PaginationParameters { page_size, page }) +// } /// Builds a paginated Response. 
/// @@ -311,7 +312,7 @@ mod tests { use aws_lambda_events::http; use effortless::api::{parse_path_parameter, parse_request_body}; use entity::wrappers::submission::SubmissionPost; - use lambda_http::{http::StatusCode, request::from_str, RequestExt}; + use lambda_http::RequestExt; use std::collections::HashMap; #[test] @@ -333,84 +334,6 @@ mod tests { assert_eq!(nav.last(), 2); } - #[test] - fn test_pagination_parameters_without_params() { - let input = include_str!("fixtures/api-gateway-v2-proxy-request-minimal.json"); - let req = from_str(input).unwrap(); - - let actual = pagination_parameters(&req).unwrap(); - assert_eq!(actual.0, DEFAULT_PAGE_SIZE); - assert_eq!(actual.1, 1); - } - - #[test] - fn test_pagination_parameters_with_valid_params() { - const PAGE_SIZE: u64 = 25; - const PAGE: u64 = 8; - - let mut data = HashMap::new(); - data.insert("page_size".into(), vec![PAGE_SIZE.to_string()]); - data.insert("page".into(), vec![PAGE.to_string()]); - - let input = include_str!("fixtures/api-gateway-v2-proxy-request-minimal.json"); - let result = from_str(input).unwrap(); - let req = result.with_query_string_parameters(data); - - let actual = pagination_parameters(&req).unwrap(); - assert_eq!(actual.0, PAGE_SIZE); - assert_eq!(actual.1, PAGE); - } - - #[test] - fn test_pagination_parameters_with_invalid_page_size() { - let mut data = HashMap::new(); - data.insert("page_size".into(), vec!["-1".to_string()]); - data.insert("page".into(), vec!["50".to_string()]); - - let input = include_str!("fixtures/api-gateway-v2-proxy-request-minimal.json"); - let result = from_str(input).unwrap(); - let req = result.with_query_string_parameters(data); - - let actual = pagination_parameters(&req).unwrap_err(); - - // Ensure the error had the BAD_REQUEST status. - assert_eq!(actual.status(), StatusCode::BAD_REQUEST); - - // Ensure the error message is correct. 
- let b = actual.body(); - let message = match b { - Body::Text(message) => message, - _ => panic!("The body does not match the Text invariant."), - }; - let api_error: APIErrors = serde_json::from_str(message).unwrap(); - assert_eq!(api_error.errors.len(), 1) - } - - #[test] - fn test_pagination_parameters_with_invalid_page() { - let mut data = HashMap::new(); - data.insert("page_size".into(), vec!["1".to_string()]); - data.insert("page".into(), vec!["abc".to_string()]); - - let input = include_str!("fixtures/api-gateway-v2-proxy-request-minimal.json"); - let result = from_str(input).unwrap(); - let req = result.with_query_string_parameters(data); - - let actual = pagination_parameters(&req).unwrap_err(); - - // Ensure the error had the BAD_REQUEST status. - assert_eq!(actual.status(), StatusCode::BAD_REQUEST); - - // Ensure the error message is correct. - let b = actual.body(); - let message = match b { - Body::Text(message) => message, - _ => panic!("The body does not match the Text invariant."), - }; - let api_error: APIErrors = serde_json::from_str(message).unwrap(); - assert_eq!(api_error.errors.len(), 1) - } - #[test] fn test_parse_path_parameter() { let event = Request::default() diff --git a/lambdas/src/price-fargate/get-price-fargate.rs b/lambdas/src/price-fargate/get-price-fargate.rs index 65823ff..84eaf64 100644 --- a/lambdas/src/price-fargate/get-price-fargate.rs +++ b/lambdas/src/price-fargate/get-price-fargate.rs @@ -1,12 +1,15 @@ use dotenv::dotenv; use effortless::{ - api::{entry_not_found, invalid_path_parameter, parse_query_string_parameter}, + api::{ + entry_not_found, extract_pagination_parameters, invalid_path_parameter, + parse_query_string_parameter, + }, error::APIError, fragment::{get_apigw_request_id, BnaRequestExt}, }; use entity::prelude::*; use lambda_http::{run, service_fn, Body, Error, IntoResponse, Request, Response}; -use lambdas::{api_database_connect, build_paginated_response, pagination_parameters}; +use 
lambdas::{api_database_connect, build_paginated_response}; use sea_orm::{EntityTrait, PaginatorTrait, QueryOrder, QuerySelect}; use serde_json::json; use tracing::{debug, info}; @@ -15,8 +18,8 @@ async fn function_handler(event: Request) -> Result, Error> { dotenv().ok(); // Retrieve pagination parameters if any. - let (page_size, page) = match pagination_parameters(&event) { - Ok((page_size, page)) => (page_size, page), + let pagination = match extract_pagination_parameters(&event) { + Ok(p) => p, Err(e) => return Ok(e), }; @@ -69,13 +72,19 @@ async fn function_handler(event: Request) -> Result, Error> { // Select the results. let query = select .clone() - .paginate(&db, page_size) - .fetch_page(page - 1) + .paginate(&db, pagination.page_size) + .fetch_page(pagination.page) .await; let res: Response = match query { Ok(models) => { let total_items = select.count(&db).await?; - build_paginated_response(json!(models), total_items, page, page_size, &event)? + build_paginated_response( + json!(models), + total_items, + pagination.page, + pagination.page_size, + &event, + )? } Err(e) => APIError::with_pointer( get_apigw_request_id(&event), diff --git a/migration/src/m20220101_000001_main.rs b/migration/src/m20220101_000001_main.rs index 6b64121..94f511b 100644 --- a/migration/src/m20220101_000001_main.rs +++ b/migration/src/m20220101_000001_main.rs @@ -1,8 +1,4 @@ -use sea_orm_migration::{ - prelude::*, - sea_orm::{EnumIter, Iterable}, - sea_query::extension::postgres::Type, -}; +use sea_orm_migration::prelude::*; #[derive(DeriveMigrationName)] pub struct Migration; @@ -10,63 +6,32 @@ pub struct Migration; #[async_trait::async_trait] impl MigrationTrait for Migration { async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { - // Create the BNA Region type. - manager - .create_type( - Type::create() - .as_enum(BNARegion::Table) - .values(BNARegion::iter().skip(1)) - .to_owned(), - ) - .await?; - - // Create the StateSpeedLimit table. 
- manager - .create_table( - Table::create() - .table(StateSpeedLimit::Table) - .col( - ColumnDef::new(StateSpeedLimit::StateAbbrev) - .char_len(2) - .not_null(), - ) - .col( - ColumnDef::new(StateSpeedLimit::StateFIPSCode) - .char_len(2) - .not_null(), - ) - .col(ColumnDef::new(StateSpeedLimit::Speed).integer().not_null()) - .col( - ColumnDef::new(StateSpeedLimit::CreatedAt) - .timestamp_with_time_zone() - .default(Expr::current_timestamp()) - .not_null(), - ) - .col(ColumnDef::new(StateSpeedLimit::UpdatedAt).timestamp_with_time_zone()) - .primary_key(Index::create().col(StateSpeedLimit::StateAbbrev)) - .to_owned(), - ) - .await?; - - // Create the StateRegionCrosswalk table. + // Create the BNA Region table. manager .create_table( Table::create() - .table(StateRegionCrosswalk::Table) + .table(BNARegion::Table) + .if_not_exists() .col( - ColumnDef::new(StateRegionCrosswalk::State) + ColumnDef::new(BNARegion::Name) .string() - .not_null() - .primary_key(), - ) - .col( - ColumnDef::new(StateRegionCrosswalk::Region) - .enumeration(BNARegion::Table, BNARegion::iter().skip(1)) + .primary_key() .not_null(), ) .to_owned(), ) .await?; + let insert_bna_regions = Query::insert() + .into_table(BNARegion::Table) + .columns([BNARegion::Name]) + .values_panic(["Mid-Atlantic".into()]) + .values_panic(["Midwest".into()]) + .values_panic(["Mountain".into()]) + .values_panic(["New England".into()]) + .values_panic(["Pacific".into()]) + .values_panic(["South".into()]) + .to_owned(); + manager.exec_stmt(insert_bna_regions).await?; // Create the country table. 
manager @@ -74,17 +39,10 @@ impl MigrationTrait for Migration { Table::create() .table(Country::Table) .if_not_exists() - .col( - ColumnDef::new(Country::Id) - .integer() - .primary_key() - .auto_increment() - .not_null(), - ) .col( ColumnDef::new(Country::Name) .string() - .unique_key() + .primary_key() .not_null(), ) .to_owned(), @@ -93,6 +51,7 @@ impl MigrationTrait for Migration { let insert_countries = Query::insert() .into_table(Country::Table) .columns([Country::Name]) + .values_panic(["Australia".into()]) .values_panic(["Belgium".into()]) .values_panic(["Brazil".into()]) .values_panic(["Canada".into()]) @@ -100,6 +59,7 @@ impl MigrationTrait for Migration { .values_panic(["Colombia".into()]) .values_panic(["Croatia".into()]) .values_panic(["Cuba".into()]) + .values_panic(["England".into()]) .values_panic(["France".into()]) .values_panic(["Germany".into()]) .values_panic(["Greece".into()]) @@ -121,6 +81,225 @@ impl MigrationTrait for Migration { .to_owned(); manager.exec_stmt(insert_countries).await?; + // Create the US state table. 
+ manager + .create_table( + Table::create() + .table(USState::Table) + .if_not_exists() + .col( + ColumnDef::new(USState::Name) + .string() + .primary_key() + .not_null(), + ) + .col( + ColumnDef::new(USState::Abbrev) + .string() + .not_null() + .unique_key(), + ) + .col( + ColumnDef::new(USState::FIPSCode) + .char_len(2) + .not_null() + .unique_key(), + ) + .col(ColumnDef::new(USState::SpeedLimit).integer().not_null()) + .to_owned(), + ) + .await?; + manager + .create_index( + Index::create() + .table(USState::Table) + .col(USState::Abbrev) + .to_owned(), + ) + .await?; + manager + .create_index( + Index::create() + .table(USState::Table) + .col(USState::FIPSCode) + .to_owned(), + ) + .await?; + let insert_us_states = Query::insert() + .into_table(USState::Table) + .columns([ + USState::Name, + USState::Abbrev, + USState::FIPSCode, + USState::SpeedLimit, + ]) + .values_panic(["Alabama".into(), "AL".into(), "01".into(), 25.into()]) + .values_panic(["Alaska".into(), "AK".into(), "02".into(), 25.into()]) + .values_panic(["Arizona".into(), "AZ".into(), "04".into(), 25.into()]) + .values_panic(["Arkansas".into(), "AR".into(), "05".into(), 30.into()]) + .values_panic(["California".into(), "CA".into(), "06".into(), 25.into()]) + .values_panic(["Colorado".into(), "CO".into(), "08".into(), 30.into()]) + .values_panic(["Connecticut".into(), "CT".into(), "09".into(), 25.into()]) + .values_panic(["Delaware".into(), "DE".into(), "10".into(), 25.into()]) + .values_panic([ + "District of Columbia".into(), + "DC".into(), + "11".into(), + 20.into(), + ]) + .values_panic(["Florida".into(), "FL".into(), "12".into(), 30.into()]) + .values_panic(["Georgia".into(), "GA".into(), "13".into(), 30.into()]) + .values_panic(["Hawaii".into(), "HI".into(), "15".into(), 25.into()]) + .values_panic(["Idaho".into(), "ID".into(), "16".into(), 35.into()]) + .values_panic(["Illinois".into(), "IL".into(), "17".into(), 30.into()]) + .values_panic(["Indiana".into(), "IN".into(), "18".into(), 
30.into()]) + .values_panic(["Iowa".into(), "IA".into(), "19".into(), 25.into()]) + .values_panic(["Kansas".into(), "KS".into(), "20".into(), 30.into()]) + .values_panic(["Kentucky".into(), "KY".into(), "21".into(), 35.into()]) + .values_panic(["Louisiana".into(), "LA".into(), "22".into(), 25.into()]) + .values_panic(["Maine".into(), "ME".into(), "23".into(), 25.into()]) + .values_panic(["Maryland".into(), "MD".into(), "24".into(), 30.into()]) + .values_panic(["Massachusetts".into(), "MA".into(), "25".into(), 25.into()]) + .values_panic(["Michigan".into(), "MI".into(), "26".into(), 25.into()]) + .values_panic(["Minnesota".into(), "MN".into(), "27".into(), 30.into()]) + .values_panic(["Mississippi".into(), "MS".into(), "28".into(), 25.into()]) + .values_panic(["Missouri".into(), "MO".into(), "29".into(), 25.into()]) + .values_panic(["Montana".into(), "MT".into(), "30".into(), 25.into()]) + .values_panic(["Nebraska".into(), "NE".into(), "31".into(), 25.into()]) + .values_panic(["Nevada".into(), "NV".into(), "32".into(), 25.into()]) + .values_panic(["New Hampshire".into(), "NH".into(), "33".into(), 30.into()]) + .values_panic(["New Jersey".into(), "NJ".into(), "34".into(), 25.into()]) + .values_panic(["New Mexico".into(), "NM".into(), "35".into(), 30.into()]) + .values_panic(["New York".into(), "NY".into(), "36".into(), 20.into()]) + .values_panic(["North Carolina".into(), "NC".into(), "37".into(), 35.into()]) + .values_panic(["North Dakota".into(), "ND".into(), "38".into(), 25.into()]) + .values_panic(["Ohio".into(), "OH".into(), "39".into(), 25.into()]) + .values_panic(["Oklahoma".into(), "OK".into(), "40".into(), 25.into()]) + .values_panic(["Oregon".into(), "OR".into(), "41".into(), 25.into()]) + .values_panic(["Pennsylvania".into(), "PA".into(), "42".into(), 25.into()]) + .values_panic(["Rhode Island".into(), "RI".into(), "44".into(), 25.into()]) + .values_panic(["South Carolina".into(), "SC".into(), "45".into(), 30.into()]) + .values_panic(["South 
Dakota".into(), "SD".into(), "46".into(), 25.into()]) + .values_panic(["Tennessee".into(), "TN".into(), "47".into(), 25.into()]) + .values_panic(["Texas".into(), "TX".into(), "48".into(), 30.into()]) + .values_panic(["Utah".into(), "UT".into(), "49".into(), 25.into()]) + .values_panic(["Vermont".into(), "VT".into(), "50".into(), 25.into()]) + .values_panic(["Virginia".into(), "VA".into(), "51".into(), 25.into()]) + .values_panic(["Washington".into(), "WA".into(), "53".into(), 25.into()]) + .values_panic(["West Virginia".into(), "WV".into(), "54".into(), 25.into()]) + .values_panic(["Wisconsin".into(), "WI".into(), "55".into(), 25.into()]) + .values_panic(["Wyoming".into(), "WY".into(), "56".into(), 30.into()]) + .values_panic(["Puerto Rico".into(), "PR".into(), "77".into(), 25.into()]) + .to_owned(); + manager.exec_stmt(insert_us_states).await?; + + // Create the StateRegionCrosswalk table. + manager + .create_table( + Table::create() + .table(StateRegionCrosswalk::Table) + .col( + ColumnDef::new(StateRegionCrosswalk::State) + .string() + .not_null(), + ) + .col( + ColumnDef::new(StateRegionCrosswalk::Region) + .string() + .not_null(), + ) + .primary_key( + Index::create() + .col(StateRegionCrosswalk::State) + .col(StateRegionCrosswalk::Region), + ) + .foreign_key( + ForeignKey::create() + .from(StateRegionCrosswalk::Table, StateRegionCrosswalk::State) + .to(USState::Table, USState::Name), + ) + .foreign_key( + ForeignKey::create() + .from(StateRegionCrosswalk::Table, StateRegionCrosswalk::Region) + .to(BNARegion::Table, BNARegion::Name), + ) + .to_owned(), + ) + .await?; + manager + .create_index( + Index::create() + .table(StateRegionCrosswalk::Table) + .col(StateRegionCrosswalk::State) + .to_owned(), + ) + .await?; + manager + .create_index( + Index::create() + .table(StateRegionCrosswalk::Table) + .col(StateRegionCrosswalk::Region) + .to_owned(), + ) + .await?; + let insert_state_region = Query::insert() + .into_table(StateRegionCrosswalk::Table) + 
.columns([StateRegionCrosswalk::State, StateRegionCrosswalk::Region]) + .values_panic(["Alabama".into(), "South".into()]) + .values_panic(["Alaska".into(), "Pacific".into()]) + .values_panic(["Arizona".into(), "Mountain".into()]) + .values_panic(["Arkansas".into(), "South".into()]) + .values_panic(["California".into(), "Pacific".into()]) + .values_panic(["Colorado".into(), "Mountain".into()]) + .values_panic(["Connecticut".into(), "New England".into()]) + .values_panic(["Delaware".into(), "Mid-Atlantic".into()]) + .values_panic(["District of Columbia".into(), "Mid-Atlantic".into()]) + .values_panic(["Florida".into(), "South".into()]) + .values_panic(["Georgia".into(), "South".into()]) + .values_panic(["Hawaii".into(), "Pacific".into()]) + .values_panic(["Idaho".into(), "Mountain".into()]) + .values_panic(["Illinois".into(), "Midwest".into()]) + .values_panic(["Indiana".into(), "Midwest".into()]) + .values_panic(["Iowa".into(), "Midwest".into()]) + .values_panic(["Kansas".into(), "Midwest".into()]) + .values_panic(["Kentucky".into(), "South".into()]) + .values_panic(["Louisiana".into(), "South".into()]) + .values_panic(["Maine".into(), "New England".into()]) + .values_panic(["Maryland".into(), "Mid-Atlantic".into()]) + .values_panic(["Massachusetts".into(), "New England".into()]) + .values_panic(["Michigan".into(), "Midwest".into()]) + .values_panic(["Minnesota".into(), "Midwest".into()]) + .values_panic(["Mississippi".into(), "South".into()]) + .values_panic(["Missouri".into(), "Midwest".into()]) + .values_panic(["Montana".into(), "Mountain".into()]) + .values_panic(["Nebraska".into(), "Midwest".into()]) + .values_panic(["Nevada".into(), "Mountain".into()]) + .values_panic(["New Hampshire".into(), "New England".into()]) + .values_panic(["New Jersey".into(), "Mid-Atlantic".into()]) + .values_panic(["New Mexico".into(), "Mountain".into()]) + .values_panic(["New York".into(), "Mid-Atlantic".into()]) + .values_panic(["North Carolina".into(), "South".into()]) + 
.values_panic(["North Dakota".into(), "Midwest".into()]) + .values_panic(["Ohio".into(), "Midwest".into()]) + .values_panic(["Oklahoma".into(), "South".into()]) + .values_panic(["Oregon".into(), "Pacific".into()]) + .values_panic(["Pennsylvania".into(), "Mid-Atlantic".into()]) + .values_panic(["Rhode Island".into(), "New England".into()]) + .values_panic(["South Carolina".into(), "South".into()]) + .values_panic(["South Dakota".into(), "Midwest".into()]) + .values_panic(["Tennessee".into(), "South".into()]) + .values_panic(["Texas".into(), "South".into()]) + .values_panic(["Utah".into(), "Mountain".into()]) + .values_panic(["Vermont".into(), "New England".into()]) + .values_panic(["Virginia".into(), "South".into()]) + .values_panic(["Washington".into(), "Pacific".into()]) + .values_panic(["West Virginia".into(), "South".into()]) + .values_panic(["Wisconsin".into(), "Midwest".into()]) + .values_panic(["Wyoming".into(), "Mountain".into()]) + .values_panic(["Puerto Rico".into(), "South".into()]) + .to_owned(); + manager.exec_stmt(insert_state_region).await?; + // Create the city table. 
manager .create_table( @@ -149,6 +328,11 @@ impl MigrationTrait for Migration { .col(City::State) .col(City::Name), ) + .foreign_key( + ForeignKey::create() + .from(City::Table, City::Country) + .to(Country::Table, Country::Name), + ) .to_owned(), ) .await?; @@ -415,7 +599,16 @@ impl MigrationTrait for Migration { .drop_table(Table::drop().table(Infrastructure::Table).to_owned()) .await?; manager - .drop_table(Table::drop().table(StateSpeedLimit::Table).to_owned()) + .drop_table(Table::drop().table(USState::Table).to_owned()) + .await?; + manager + .drop_table(Table::drop().table(Country::Table).to_owned()) + .await?; + manager + .drop_table(Table::drop().table(StateRegionCrosswalk::Table).to_owned()) + .await?; + manager + .drop_table(Table::drop().table(BNARegion::Table).to_owned()) .await?; Ok(()) @@ -570,40 +763,17 @@ enum Infrastructure { HighStressMiles, } +/// Lookup table for the BNA regions. #[derive(Iden)] -enum StateSpeedLimit { - Table, - /// Two-letter state abbreviation. - StateAbbrev, - /// State FIPS code. - StateFIPSCode, - /// State speed limit. - Speed, - /// Creation date. - CreatedAt, - /// Update date. - UpdatedAt, -} - -#[derive(Iden, EnumIter)] pub enum BNARegion { Table, - #[iden = "Mid-Atlantic"] - MidAtlantic, - #[iden = "Midwest"] - Midwest, - #[iden = "Mountain"] - Mountain, - #[iden = "New England"] - NewEngland, - #[iden = "Pacific"] - Pacific, - #[iden = "South"] - South, + /// Name of the BNA region. + Name, } +/// Lookup table for the state region crosswalks. #[derive(Iden)] -enum StateRegionCrosswalk { +pub enum StateRegionCrosswalk { Table, /// State name. State, @@ -611,11 +781,24 @@ enum StateRegionCrosswalk { Region, } +/// Lookup table for the countries. #[derive(Iden)] -enum Country { +pub enum Country { Table, - /// Country ID. - Id, /// Country name. Name, } + +/// Lookup table for the US states. +#[derive(Iden)] +pub enum USState { + Table, + /// State name. + Name, + /// Two-letter state abbreviation. 
+ Abbrev, + /// State FIPS code. + FIPSCode, + /// State speed limit in mph. + SpeedLimit, +} diff --git a/migration/src/m20231010_232527_city_submission.rs b/migration/src/m20231010_232527_city_submission.rs index b23a106..8b2e51c 100644 --- a/migration/src/m20231010_232527_city_submission.rs +++ b/migration/src/m20231010_232527_city_submission.rs @@ -1,5 +1,6 @@ -use sea_orm::{EnumIter, Iterable}; -use sea_orm_migration::{prelude::*, sea_query::extension::postgres::Type}; +use sea_orm_migration::prelude::*; + +use crate::m20220101_000001_main::Country; #[derive(DeriveMigrationName)] pub struct Migration; @@ -7,15 +8,29 @@ pub struct Migration; #[async_trait::async_trait] impl MigrationTrait for Migration { async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { - // Create the approval status type. + // Create the approval status table. manager - .create_type( - Type::create() - .as_enum(ApprovalStatus::Table) - .values(ApprovalStatus::iter().skip(1)) + .create_table( + Table::create() + .table(ApprovalStatus::Table) + .if_not_exists() + .col( + ColumnDef::new(ApprovalStatus::Status) + .string() + .primary_key() + .not_null(), + ) .to_owned(), ) .await?; + let insert_approval_statuses = Query::insert() + .into_table(ApprovalStatus::Table) + .columns([ApprovalStatus::Status]) + .values_panic(["Pending".into()]) + .values_panic(["Approved".into()]) + .values_panic(["Rejected".into()]) + .to_owned(); + manager.exec_stmt(insert_approval_statuses).await?; // Create the Submission table. 
manager @@ -45,18 +60,23 @@ impl MigrationTrait for Migration { .default("0"), ) .col(ColumnDef::new(Submission::Consent).boolean().not_null()) - .col( - ColumnDef::new(Submission::Status) - .enumeration(ApprovalStatus::Table, ApprovalStatus::iter().skip(1)) - .not_null() - .default(ApprovalStatus::Pending.to_string()), - ) + .col(ColumnDef::new(Submission::Status).string().not_null()) .col( ColumnDef::new(Submission::CreatedAt) .timestamp_with_time_zone() .default(Expr::current_timestamp()) .not_null(), ) + .foreign_key( + ForeignKey::create() + .from(Submission::Table, Submission::Status) + .to(ApprovalStatus::Table, ApprovalStatus::Status), + ) + .foreign_key( + ForeignKey::create() + .from(Submission::Table, Submission::Country) + .to(Country::Table, Country::Name), + ) .to_owned(), ) .await?; @@ -89,13 +109,9 @@ enum Submission { CreatedAt, } -#[derive(Iden, EnumIter)] +/// Lookup table for the approval statuses. +#[derive(Iden)] pub enum ApprovalStatus { Table, - #[iden = "Pending"] - Pending, - #[iden = "Approved"] - Approved, - #[iden = "Rejected"] - Rejected, + Status, } diff --git a/migration/src/m20240202_004130_brokenspoke_analyzer_pipeline.rs b/migration/src/m20240202_004130_brokenspoke_analyzer_pipeline.rs index 6341b9a..b762b7b 100644 --- a/migration/src/m20240202_004130_brokenspoke_analyzer_pipeline.rs +++ b/migration/src/m20240202_004130_brokenspoke_analyzer_pipeline.rs @@ -1,5 +1,4 @@ -use sea_orm::{EnumIter, Iterable}; -use sea_orm_migration::{prelude::*, sea_query::extension::postgres::Type}; +use sea_orm_migration::prelude::*; #[derive(DeriveMigrationName)] pub struct Migration; @@ -7,22 +6,34 @@ pub struct Migration; #[async_trait::async_trait] impl MigrationTrait for Migration { async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { - // Create the approval status type. + // Create the Brokenspoke Status lookup table. 
manager - .create_type( - Type::create() - .as_enum(BrokenspokeStatus::Table) - .values(BrokenspokeStatus::iter().skip(1)) + .create_table( + Table::create() + .table(BrokenspokeStatus::Table) + .if_not_exists() + .col( + ColumnDef::new(BrokenspokeStatus::Status) + .string() + .not_null() + .primary_key(), + ) .to_owned(), ) .await?; - // Create the approval status type. + // Create the Brokenspoke Step lookup table. manager - .create_type( - Type::create() - .as_enum(BrokenspokeStep::Table) - .values(BrokenspokeStep::iter().skip(1)) + .create_table( + Table::create() + .table(BrokenspokeStep::Table) + .if_not_exists() + .col( + ColumnDef::new(BrokenspokeStep::Step) + .string() + .not_null() + .primary_key(), + ) .to_owned(), ) .await?; @@ -39,10 +50,7 @@ impl MigrationTrait for Migration { .not_null() .primary_key(), ) - .col( - ColumnDef::new(BrokenspokePipeline::Step) - .enumeration(BrokenspokeStep::Table, BrokenspokeStep::iter().skip(1)), - ) + .col(ColumnDef::new(BrokenspokePipeline::Step).string()) .col(ColumnDef::new(BrokenspokePipeline::SqsMessage).json()) .col(ColumnDef::new(BrokenspokePipeline::FargateTaskARN).string()) .col(ColumnDef::new(BrokenspokePipeline::S3Bucket).string()) @@ -112,22 +120,29 @@ enum BrokenspokePipeline { TornDown, } -#[derive(Iden, EnumIter)] +/// Lookup table for the brokenspoke statuses. +// +// Pending, +// Started, +// Complete, +#[derive(Iden)] pub enum BrokenspokeStatus { Table, - Pending, - Started, - Complete, + Status, } -#[derive(Iden, EnumIter)] +/// Lookup table for the brokenspoke steps. 
+// +// SqsMessage, +// Setup, +// Analysis, +// Cleanup, +#[derive(Iden)] pub enum BrokenspokeStep { Table, - SqsMessage, - Setup, - Analysis, - Cleanup, + Step, } + // Pricing is coming from the CloudTempo calculator with the following paramaters: // - Architecture: x86 // - Region: US West (Oregon) diff --git a/openapi.yaml b/openapi.yaml index 0a864be..271fcc2 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -944,6 +944,7 @@ components: country: type: string enum: + - Australia - Belgium - Brazil - Canada @@ -951,6 +952,7 @@ components: - Colombia - Croatia - Cuba + - England - France - Germany - Greece @@ -1425,7 +1427,7 @@ components: name: page in: query required: false - description: "Page index (starting at 1)" + description: "Page index (starting at 0)" schema: type: integer default: 1