|
|
|
|
@ -9,6 +9,7 @@ from postamates.settings import DEFAULT_PLACEMENT_POINT_UPDATE_RADIUS
|
|
|
|
|
from service import models
|
|
|
|
|
from service.enums import PointStatus
|
|
|
|
|
from service.tasks import raschet
|
|
|
|
|
from service.utils import create_columns_dist
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class LayerService:
|
|
|
|
|
@ -18,6 +19,7 @@ class LayerService:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class PointService:
|
|
|
|
|
|
|
|
|
|
def update_fact(self, postamat_id: str, fact: int):
    """Set the ``fact`` value on every placement point tied to *postamat_id*.

    Performs a bulk queryset ``update`` — no model ``save()`` hooks fire.
    """
    points = self.get_point_by_postamat_id(postamat_id)
    points.update(fact=fact)
|
|
|
|
|
@ -26,6 +28,12 @@ class PointService:
|
|
|
|
|
qs = self.get_point_by_id(point_id)
|
|
|
|
|
qs.update(**{'postamat_id': postamat_id})
|
|
|
|
|
|
|
|
|
|
@staticmethod
def get_min_distances_to_group(postamat_id: str):
    """Return a ``{pvz_postamates_group: dist}`` mapping for one placement point.

    Looks up every ``PlacementPointPVZDistance`` row whose ``placement_point``
    equals *postamat_id* and pairs each group id with its stored distance.

    Fix: the previous version materialized the queryset with ``list()`` and
    rebuilt pairs through ``.values()`` dicts; ``values_list`` already yields
    ``(group, dist)`` tuples that feed ``dict()`` directly in one pass.
    """
    return dict(
        models.PlacementPointPVZDistance.objects
        .filter(placement_point=postamat_id)
        .values_list('pvz_postamates_group', 'dist')
    )
|
|
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
|
def update_points_in_radius(qs: models.PlacementPoint, new_status: str):
|
|
|
|
|
triggers = False
|
|
|
|
|
@ -66,13 +74,25 @@ class PointService:
|
|
|
|
|
return models.PlacementPoint.objects.filter(postamat_id=postamat_id)
|
|
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
|
def to_excel(qs: models.PlacementPoint):
|
|
|
|
|
data = pd.DataFrame(list(qs.values()))
|
|
|
|
|
def to_excel(serializer):
|
|
|
|
|
data = pd.DataFrame(serializer.data)
|
|
|
|
|
if not data.empty:
|
|
|
|
|
if data['start_date'].any():
|
|
|
|
|
data['start_date'] = data.get('start_date').dt.tz_localize(None)
|
|
|
|
|
data['sample_trn'] = data['sample_trn'].astype(int)
|
|
|
|
|
data.rename(columns={'district_id': 'district', 'area_id': 'area'}, inplace=True)
|
|
|
|
|
data['min_distance_to_group'] = data['min_distance_to_group'].apply(lambda x: list(x.items()))
|
|
|
|
|
new_columns = data.apply(create_columns_dist, axis=1)
|
|
|
|
|
for ind in new_columns.columns:
|
|
|
|
|
expanded = new_columns[ind].apply(pd.Series)
|
|
|
|
|
group = models.Post_and_pvzGroup.objects.get(id=int(expanded.loc[0, 0]))
|
|
|
|
|
expanded[[f"group_{ind + 1}_name", f"group_{ind + 1}_category"]] = group.name, group.category.name
|
|
|
|
|
expanded = expanded.rename(columns={1: f"dist_to_group_{ind + 1}"})
|
|
|
|
|
expanded = expanded.drop(0, axis=1)
|
|
|
|
|
new_columns = pd.concat([new_columns, expanded], axis=1)
|
|
|
|
|
new_columns = new_columns.drop(ind, axis=1)
|
|
|
|
|
data.drop('min_distance_to_group', axis=1, inplace=True)
|
|
|
|
|
data = pd.concat([data, new_columns], axis=1)
|
|
|
|
|
with BytesIO() as b:
|
|
|
|
|
with pd.ExcelWriter(b) as writer:
|
|
|
|
|
data.to_excel(
|
|
|
|
|
@ -82,13 +102,25 @@ class PointService:
|
|
|
|
|
return b.getvalue()
|
|
|
|
|
|
|
|
|
|
@staticmethod
def to_json(serializer):
    """Render serialized placement points as a JSON string (``records`` orient).

    Pipeline: build a DataFrame from ``serializer.data``, normalize
    ``start_date`` to naive datetimes, coerce ``sample_trn`` to int, reshape
    ``geometry`` into a lat/long dict, then expand ``min_distance_to_group``
    into per-group name/category/distance columns via ``create_columns_dist``.

    Fix: removed a stale duplicate definition (``def to_json(qs)`` +
    ``pd.DataFrame(list(qs.values()))``) left behind by a merge — it was dead
    code shadowed by this signature. Also added an empty-input guard, matching
    the ``data.empty`` check the sibling ``to_excel`` already performs;
    previously an empty serializer raised ``KeyError`` on ``start_date``.
    """
    data = pd.DataFrame(serializer.data)
    if data.empty:
        # Nothing to transform — emit an empty JSON array instead of crashing.
        return data.to_json(orient='records')
    data['start_date'] = pd.to_datetime(data['start_date'], errors='coerce')
    data['start_date'] = data['start_date'].dt.tz_localize(None)
    data['sample_trn'] = data['sample_trn'].astype(int)
    # Source geometry is (lon, lat) ordered. NOTE(review): 'longtitude' is a
    # typo, but it is a public payload key — renaming would break consumers;
    # confirm with API clients before fixing.
    data['geometry'] = data['geometry'].apply(lambda x: {'latitude': x[1], 'longtitude': x[0]})
    data.rename(columns={'district_id': 'district', 'area_id': 'area'}, inplace=True)
    # Flatten the {group: dist} mapping into a list of pairs so that
    # create_columns_dist can spread it across positional columns.
    data['min_distance_to_group'] = data['min_distance_to_group'].apply(lambda x: list(x.items()))
    new_columns = data.apply(create_columns_dist, axis=1)
    for ind in new_columns.columns:
        expanded = new_columns[ind].apply(pd.Series)
        # Group id taken from the first row — assumes the column holds one
        # group per position across all rows (TODO confirm with caller data).
        group = models.Post_and_pvzGroup.objects.get(id=int(expanded.loc[0, 0]))
        expanded[[f"group_{ind + 1}_name", f"group_{ind + 1}_category"]] = group.name, group.category.name
        expanded = expanded.rename(columns={1: f"dist_to_group_{ind + 1}"})
        expanded = expanded.drop(0, axis=1)
        new_columns = pd.concat([new_columns, expanded], axis=1)
        new_columns = new_columns.drop(ind, axis=1)
    data.drop('min_distance_to_group', axis=1, inplace=True)
    data = pd.concat([data, new_columns], axis=1)
    return data.to_json(orient='records')
|
|
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
|
|