2023-11-30 14:41:44 -08:00
|
|
|
import datetime
|
|
|
|
|
from zoneinfo import ZoneInfo
|
|
|
|
|
|
2019-02-14 14:25:31 +00:00
|
|
|
from app.extensions import redis_client
|
2018-07-20 10:18:51 +01:00
|
|
|
from app.notify_client import NotifyAdminAPIClient, _attach_current_user, cache
|
2023-11-30 14:41:44 -08:00
|
|
|
from app.utils.csv import get_user_preferred_timezone
|
2016-01-29 10:27:23 +00:00
|
|
|
|
|
|
|
|
|
2016-11-30 17:01:44 +00:00
|
|
|
class JobApiClient(NotifyAdminAPIClient):
    """Client for the admin API's bulk-send "job" endpoints.

    A job is a batch of notifications created from an uploaded file; it can
    be scheduled, queried for progress, and cancelled. Several methods also
    maintain the ``has_jobs-{service_id}`` redis cache entry so the UI can
    cheaply ask "has this service ever run a job?".
    """

    # Every status the API can report for a job.
    JOB_STATUSES = {
        "scheduled",
        "pending",
        "in progress",
        "finished",
        "cancelled",
        "sending limits exceeded",
        "ready to send",
        "sent to dvla",
    }

    SCHEDULED_JOB_STATUS = "scheduled"
    CANCELLED_JOB_STATUS = "cancelled"
    NON_CANCELLED_JOB_STATUSES = JOB_STATUSES - {CANCELLED_JOB_STATUS}
    # Jobs that are (or were) actually processed — i.e. neither still
    # waiting on a schedule nor cancelled.
    NON_SCHEDULED_JOB_STATUSES = JOB_STATUSES - {
        SCHEDULED_JOB_STATUS,
        CANCELLED_JOB_STATUS,
    }

    def get_job(self, service_id, job_id):
        """Fetch a single job belonging to a service."""
        return self.get(url=f"/service/{service_id}/job/{job_id}", params={})

    def get_jobs(self, service_id, *, limit_days=None, statuses=None, page=1):
        """Fetch one page of a service's jobs.

        Args:
            service_id: the service whose jobs to list.
            limit_days: if given, restrict to jobs created in the last
                ``limit_days`` days (``0`` means today only — see
                ``has_sent_previously``).
            statuses: if given, an iterable of job statuses to filter on;
                sent to the API as a comma-separated string.
            page: 1-indexed page number.

        Returns:
            The API response dict (job rows are under the ``"data"`` key,
            as used by callers below).
        """
        params = {"page": page}
        if limit_days is not None:
            params["limit_days"] = limit_days
        if statuses is not None:
            params["statuses"] = ",".join(statuses)

        return self.get(url=f"/service/{service_id}/job", params=params)

    def get_uploads(self, service_id, limit_days=None, page=1):
        """Fetch one page of a service's uploads, optionally limited to the
        last ``limit_days`` days."""
        params = {"page": page}
        if limit_days is not None:
            params["limit_days"] = limit_days
        return self.get(url=f"/service/{service_id}/upload", params=params)

    def has_sent_previously(
        self, service_id, template_id, template_version, original_file_name
    ):
        """Return True if a non-cancelled job in today's window
        (``limit_days=0``) already used this exact template version and
        original file name — i.e. this looks like a duplicate send."""
        return (template_id, template_version, original_file_name) in (
            (
                job["template"],
                job["template_version"],
                job["original_file_name"],
            )
            for job in self.get_jobs(service_id, limit_days=0)["data"]
            if job["job_status"] != "cancelled"
        )

    def get_page_of_jobs(self, service_id, *, page, statuses=None, limit_days=None):
        """Fetch one page of processed (non-scheduled, non-cancelled) jobs,
        unless an explicit ``statuses`` filter is supplied."""
        return self.get_jobs(
            service_id,
            statuses=statuses or self.NON_SCHEDULED_JOB_STATUSES,
            page=page,
            limit_days=limit_days,
        )

    def get_immediate_jobs(self, service_id):
        """Return the job rows for jobs processed in the last 7 days
        (excluding scheduled and cancelled jobs)."""
        return self.get_jobs(
            service_id,
            limit_days=7,
            statuses=self.NON_SCHEDULED_JOB_STATUSES,
        )["data"]

    def get_scheduled_jobs(self, service_id):
        """Return scheduled job rows, most distant ``scheduled_for`` first."""
        return sorted(
            self.get_jobs(service_id, statuses=[self.SCHEDULED_JOB_STATUS])["data"],
            key=lambda job: job["scheduled_for"],
            reverse=True,
        )

    def get_scheduled_job_stats(self, service_id):
        """Fetch aggregate statistics about a service's scheduled jobs."""
        return self.get(url=f"/service/{service_id}/job/scheduled-job-stats")

    @cache.set("has_jobs-{service_id}")
    def has_jobs(self, service_id):
        """Return True if the service has any jobs at all (result cached)."""
        return bool(self.get_jobs(service_id)["data"])

    @classmethod
    def convert_user_time_to_utc(cls, scheduled_for):
        """Convert a naive ISO-8601 timestamp entered in the user's
        preferred timezone into the equivalent naive UTC timestamp string
        (``%Y-%m-%dT%H:%M:%S``)."""
        user_preferred_tz = get_user_preferred_timezone()

        user_date = datetime.datetime.fromisoformat(scheduled_for)
        # Attach the user's zone, convert to UTC, then drop the tz marker
        # again so the API receives a plain timestamp.
        return (
            user_date.replace(tzinfo=ZoneInfo(user_preferred_tz))
            .astimezone(ZoneInfo("UTC"))
            .strftime("%Y-%m-%dT%H:%M:%S")
        )

    def create_job(
        self,
        job_id,
        service_id,
        scheduled_for=None,
        template_id=None,
        original_file_name=None,
        notification_count=None,
        valid=None,
    ):
        """Create a job and prime the ``has_jobs`` cache for the service.

        Args:
            job_id: client-generated id for the new job.
            service_id: the service the job belongs to.
            scheduled_for: optional naive timestamp in the user's preferred
                timezone; converted to UTC before being sent.
            template_id, original_file_name, notification_count, valid:
                optional metadata forwarded to the API when truthy.

        Returns:
            The API response for the created job.
        """
        data = {"id": job_id}

        if scheduled_for:
            # The user enters the time in their own timezone; the API
            # stores UTC.
            data["scheduled_for"] = JobApiClient.convert_user_time_to_utc(scheduled_for)

        if template_id:
            data["template_id"] = template_id
        if original_file_name:
            data["original_file_name"] = original_file_name
        # NOTE(review): these truthiness checks silently drop falsy values
        # (notification_count=0, valid=False) — presumably intentional,
        # but confirm against the API's expectations.
        if notification_count:
            data["notification_count"] = notification_count
        if valid:
            data["valid"] = valid

        data = _attach_current_user(data)
        job = self.post(url=f"/service/{service_id}/job", data=data)

        # Creating a job means the service definitely has jobs now, so set
        # the cache entry directly rather than waiting for has_jobs() to
        # repopulate it.
        redis_client.set(
            f"has_jobs-{service_id}",
            b"true",
            ex=int(cache.DEFAULT_TTL),
        )

        return job

    @cache.delete("has_jobs-{service_id}")
    def cancel_job(self, service_id, job_id):
        """Cancel a job and invalidate the service's ``has_jobs`` cache."""
        return self.post(url=f"/service/{service_id}/job/{job_id}/cancel", data={})
|
|
|
|
|
|
2018-10-26 15:39:32 +01:00
|
|
|
|
|
|
|
|
# Shared module-level client instance imported by the rest of the app
# (presumably wired up to the Flask app elsewhere, per the
# NotifyAdminAPIClient pattern — confirm in app factory).
job_api_client = JobApiClient()
|