job-scraper/scrapers/ashby.py
Initial commit: Job scraper for privacy/open-source companies
- Scrapes job listings from Greenhouse, Lever, and Ashby platforms
- Tracks 14 companies (1Password, DuckDuckGo, GitLab, etc.)
- SQLite database for change detection
- Filters by engineering job titles and location preferences
- Generates static HTML dashboard with search/filter
- Docker support for deployment to Debian server

from .base import BaseScraper, Job


class AshbyScraper(BaseScraper):
    """
    Scraper for companies using Ashby.

    Ashby provides a public JSON API endpoint per job board, e.g.
    https://api.ashbyhq.com/posting-api/job-board/{company}
    """

    def __init__(self, company_name: str, ashby_company: str, **kwargs):
        # Build the Ashby posting-API URL for this company's job board
        jobs_url = f"https://api.ashbyhq.com/posting-api/job-board/{ashby_company}"
        super().__init__(company_name, jobs_url, **kwargs)
        self.ashby_company = ashby_company

    def scrape(self) -> list[Job]:
        """Scrape jobs from the Ashby API."""
        data = self.fetch_json()
        jobs = []

        for job_data in data.get("jobs", []):
            job_id = job_data.get("id", "")
            title = job_data.get("title", "")
            job_url = job_data.get("jobUrl", "")

            # Location info
            location = job_data.get("location", "")
            department = job_data.get("department", "")

            # Employment type (e.g. full-time); fetched but not yet stored on Job
            employment_type = job_data.get("employmentType", "")

            # Prefer Ashby's explicit isRemote flag; otherwise classify from the location text
            is_remote = job_data.get("isRemote", False)
            if is_remote:
                remote_type = "remote"
            else:
                remote_type = self.classify_remote(location)

            jobs.append(Job(
                external_id=job_id,
                title=title,
                url=job_url,
                location=location,
                department=department,
                remote_type=remote_type,
            ))

        return jobs
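

# Minimal usage sketch: assumes BaseScraper provides fetch_json() and
# classify_remote() as used above; the "duckduckgo" board slug is illustrative only.
if __name__ == "__main__":
    scraper = AshbyScraper(company_name="DuckDuckGo", ashby_company="duckduckgo")
    for job in scraper.scrape():
        print(f"{job.title} | {job.location} | {job.remote_type}")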