AL: Bills: 2024 prefiles (#4719)
* AL: Bills: 2024 prefiles
showerst authored Nov 14, 2023
1 parent 7594ee7 commit 9508d84
Showing 2 changed files with 14 additions and 4 deletions.
scrapers/al/__init__.py (12 changes: 11 additions & 1 deletion)
@@ -34,16 +34,26 @@ class Alabama(State):
             "name": "2023 Regular Session",
             "start_date": "2023-03-07",
             "end_date": "2023-06-08",
             "active": False,
         },
+        {
+            "_scraped_name": "Regular Session 2024",
+            "classification": "primary",
+            "identifier": "2024rs",
+            "name": "2024 Regular Session",
+            "start_date": "2024-02-06",
+            "end_date": "2024-05-24",
+            "active": True,
+        },
     ]
     ignored_scraped_sessions = []

     def get_session_list(self):
-        return ["Regular Session 2023"]
+        return ["Regular Session 2023", "Regular Session 2024"]

     def get_scraper_ids(self, session):
         ids = {
+            "2024rs": {"session_year": "2024", "session_type": "2024 Regular Session"},
             "2023rs": {"session_year": "2023", "session_type": "2023 Regular Session"},
             "2023s1": {
                 "session_year": "2023",
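The new "2024rs" entry above maps the Open States session identifier to the sessionYear and sessionType values the Alabama GraphQL API expects. A minimal standalone sketch of that lookup follows; the dict values are copied from the diff, while the lookup_session helper and the script wrapper are illustrative only, not part of the scraper:

# Illustrative sketch only: the real lookup lives in Alabama.get_scraper_ids().
SCRAPER_IDS = {
    "2024rs": {"session_year": "2024", "session_type": "2024 Regular Session"},
    "2023rs": {"session_year": "2023", "session_type": "2023 Regular Session"},
}


def lookup_session(session: str) -> dict:
    """Return the GraphQL session parameters for an Open States session identifier."""
    return SCRAPER_IDS[session]


if __name__ == "__main__":
    params = lookup_session("2024rs")
    print(params["session_year"], params["session_type"])  # 2024 2024 Regular Session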
scrapers/al/bills.py (6 changes: 3 additions & 3 deletions)
@@ -6,7 +6,6 @@
 import dateutil
 import requests
 from openstates.scrape import Scraper, Bill, VoteEvent
-from openstates.exceptions import EmptyScrape
 from utils.media import get_media_type
 from .actions import Categorizer

@@ -47,7 +46,6 @@ def scrape_bill_type(self, session, bill_type):
         limit = 10000
         # max of 10 pages in case something goes way wrong
         while offset < 100000:
-            # WARNING: 2023 session id is currently hardcoded
             json_data = {
                 "query": f'{{allInstrumentOverviews(instrumentType:"{bill_type}", instrumentNbr:"", body:"", sessionYear:"{self.session_year}", sessionType:"{self.session_type}", assignedCommittee:"", status:"", currentStatus:"", subject:"", instrumentSponsor:"", companionInstrumentNbr:"", effectiveDateCertain:"", effectiveDateOther:"", firstReadSecondBody:"", secondReadSecondBody:"", direction:"ASC"orderBy:"InstrumentNbr"limit:"{limit}"offset:"{offset}" search:"" customFilters: {{}}companionReport:"", ){{ ID,SessionYear,InstrumentNbr,InstrumentUrl, InstrumentSponsor,SessionType,Body,Subject,ShortTitle,AssignedCommittee,PrefiledDate,FirstRead,CurrentStatus,LastAction,ActSummary,ViewEnacted,CompanionInstrumentNbr,EffectiveDateCertain,EffectiveDateOther,InstrumentType,IntroducedUrl,EngrossedUrl,EnrolledUrl }}}}',
                 "operationName": "",
@@ -57,7 +55,9 @@ def scrape_bill_type(self, session, bill_type):
             page = self.post(self.gql_url, headers=self.gql_headers, json=json_data)
             page = json.loads(page.content)
             if len(page["data"]["allInstrumentOverviews"]) < 1 and offset == 0:
-                raise EmptyScrape
+                # TODO: this fails if one chamber is empty and the other isn't
+                # raise EmptyScrape
+                return

             for row in page["data"]["allInstrumentOverviews"]:
                 chamber = self.chamber_map[row["Body"]]
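The bills.py change drops the hardcoded-session warning (the session year and type now come from get_scraper_ids) and swaps raise EmptyScrape for a plain return when the first page for a bill type comes back empty, so an empty chamber no longer aborts the whole scrape. Below is a hedged sketch of that pagination and early-return pattern, assuming a fetch_page stand-in for the GraphQL POST the scraper performs with self.post(); the loop-exit logic after the yield is an assumption, since the diff does not show the rest of the loop:

from typing import Callable, Iterator


def iter_instruments(fetch_page: Callable[[int, int], list]) -> Iterator[dict]:
    # fetch_page(limit, offset) stands in for the GraphQL POST in bills.py.
    limit = 10000
    offset = 0
    # max of 10 pages in case something goes way wrong
    while offset < 100000:
        rows = fetch_page(limit, offset)
        if len(rows) < 1 and offset == 0:
            # Empty first page for this bill type: stop quietly instead of
            # raising EmptyScrape, so one empty chamber does not kill the run.
            return
        yield from rows
        if len(rows) < limit:  # assumed exit condition, not shown in the diff
            break
        offset += limit


if __name__ == "__main__":
    # Fake a single page of results to exercise the loop.
    pages = {0: [{"InstrumentNbr": "HB1"}]}
    print(list(iter_instruments(lambda limit, offset: pages.get(offset, []))))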
