You can not select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
272 lines
15 KiB
272 lines
15 KiB
import base64
import datetime
from io import BytesIO

import pandas as pd
import requests
from django.contrib.gis.db.models.functions import Distance as Dist
from django.contrib.gis.measure import Distance
from django.db.models import Avg, Count, F, Sum
from django.db.models.functions import Greatest

from postamates.settings import AGE_DAY_LIMIT, DEFAULT_PLACEMENT_POINT_UPDATE_RADIUS, GEOCODER_API_KEY
from service import models
from service.enums import MatchingStatus, PointStatus
from service.tasks import raschet
from service.utils import create_columns_dist
|
|
|
|
|
class PointService:
    """Domain service for placement points.

    Covers matching of uploaded addresses against existing points,
    geo-feature enrichment for the ML model, status transitions
    (with neighbour-counter maintenance) and Excel/JSON export.
    """

    def update_fact(self, postamat_id: str, fact: int):
        """Set the ``fact`` value on every point with the given postamat id."""
        qs = self.get_point_by_postamat_id(postamat_id)
        qs.update(fact=fact)

    def update_postamat_id(self, point_id: int, postamat_id: str):
        """Attach a postamat id to the point with the given primary key."""
        qs = self.get_point_by_id(point_id)
        qs.update(postamat_id=postamat_id)

    def start_mathing(self, obj_id: int):
        """Geocode each address from the uploaded Excel file and match it.

        The file is stored base64-encoded in ``TempFiles``. Each row is
        geocoded via the HERE API; a hit on an existing ``PlacementPoint``
        (same street / house number / category) is copied as *Matched*,
        otherwise a *New* pre-placement point is created. Rows the geocoder
        cannot resolve are stored as *Error*.

        Returns a ``(total, matched, problem)`` tuple of row counters.

        NOTE: the method name keeps the historical typo ("mathing") because
        callers depend on it.
        """
        file = models.TempFiles.objects.get(id=obj_id)
        excel_file = base64.b64decode(file.data)
        # Wrap raw bytes in BytesIO: newer pandas versions reject bare bytes.
        df = pd.read_excel(BytesIO(excel_file))
        total = df.shape[0]
        matched = 0
        problem = 0
        for _i, row in df.iterrows():
            addr = row['Адрес']
            cat = row['Категория объекта']
            # Let requests URL-encode the free-form address instead of
            # interpolating it into the query string (spaces, '&', '#' etc.
            # would otherwise corrupt the request).
            response = requests.get(
                "https://geocode.search.hereapi.com/v1/geocode",
                params={'q': addr, 'apiKey': GEOCODER_API_KEY},
            ).json().get('items')
            if not response:
                models.PrePlacementPoint.objects.create(
                    address=addr, matching_status=MatchingStatus.Error.name)
                problem += 1
                continue
            coords = response[0]['position']
            wkt = f"POINT({coords['lng']} {coords['lat']})"
            address = response[0]['address']
            # HERE may omit street/houseNumber for imprecise matches; fall
            # back to None instead of raising KeyError mid-import.
            street = address.get('street')
            house_number = address.get('houseNumber')
            obj = models.PlacementPoint.objects.filter(
                street=street, house_number=house_number, category=cat,
            ).values().first()
            if obj:
                obj.pop('id')  # let the DB assign a fresh primary key
                models.PrePlacementPoint.objects.create(
                    **{**obj, "matching_status": MatchingStatus.Matched.name})
                matched += 1
            else:
                models.PrePlacementPoint.objects.create(
                    address=addr, street=street,
                    house_number=house_number,
                    category=cat, geometry=wkt,
                    matching_status=MatchingStatus.New.name)
        models.TempFiles.objects.all().delete()
        return total, matched, problem

    @staticmethod
    def make_enrichment():
        """Compute geo-features for every freshly matched point.

        For each ``PrePlacementPoint`` in the *New* matching status,
        annotates distances and counts of surrounding infrastructure
        (own/competitor postamats, transport, retail, population and
        real-estate aggregates) within
        ``DEFAULT_PLACEMENT_POINT_UPDATE_RADIUS`` metres and saves the point.
        """
        radius = Distance(m=DEFAULT_PLACEMENT_POINT_UPDATE_RADIUS)

        def count_group(name, origin):
            # Number of OtherObjects of the given group inside the radius.
            return models.OtherObjects.objects.filter(
                group__name=name,
                wkt__distance_lt=(origin, radius)).count()

        def aggregate_group(name, origin, **aggregates):
            # Radius-bounded aggregate over OtherObjects of the given group.
            return models.OtherObjects.objects.filter(
                group__name=name,
                wkt__distance_lt=(origin, radius)).aggregate(**aggregates)

        points = models.PrePlacementPoint.objects.filter(
            matching_status=MatchingStatus.New.name).all()
        for point in points:
            origin = point.geometry

            # Own network: nearest working postamats.
            working = models.PlacementPoint.objects.filter(
                status=PointStatus.Working.name).annotate(
                dist=Dist('geometry', origin)).order_by('dist')
            point.target_dist = working[0].dist.m
            point.target_post_cnt = working.filter(dist__lt=radius).count()
            point.target_cnt_ao_mean = working[0].target_cnt_ao_mean

            # Competitor postamats / pickup points flagged for ML use.
            point.rival_post_cnt = models.Post_and_pvz.objects.filter(
                category__name="Постамат", include_in_ml=True,
                wkt__distance_lt=(origin, radius)).count()
            point.rival_pvz_cnt = models.Post_and_pvz.objects.filter(
                category__name="ПВЗ", include_in_ml=True,
                wkt__distance_lt=(origin, radius)).count()

            # Distance to the nearest metro station (not radius-bounded).
            point.metro_dist = models.OtherObjects.objects.filter(
                group__name='metro_stations').annotate(
                dist=Dist('wkt', origin)).order_by('dist')[0].dist.m

            # Real-estate price aggregates within the radius.
            point.property_price_bargains = aggregate_group(
                "bargains", origin, param1__avg=Avg('param1'))['param1__avg']
            offers_estate = aggregate_group(
                "offers_estate", origin,
                param1__avg=Avg('param1'), param3__avg=Avg('param3'))
            point.property_price_offers = offers_estate['param1__avg']
            point.property_mean_floor = offers_estate['param3__avg']

            # Most common construction era among ALL estate offers —
            # intentionally not radius-bounded in the original logic.
            point.property_era = models.OtherObjects.objects.filter(
                group__name="offers_estate").values('param2').annotate(
                cnt=Count('param2')).order_by('-cnt').first()['param2']

            point.flats_cnt = aggregate_group(
                "flats_cnt", origin, param1__sum=Sum('param1'))['param1__sum']

            popul_home_job = aggregate_group(
                "popul_home_job", origin,
                param1__sum=Sum('param1'), param3__sum=Sum('param3'))
            point.popul_home = popul_home_job['param1__sum']
            point.popul_job = popul_home_job['param3__sum']

            yndx_food_cnt_amt = aggregate_group(
                "yndx_food_cnt_amt", origin,
                param1__sum=Sum('param1'), param3__sum=Sum('param3'))
            point.yndxfood_sum = yndx_food_cnt_amt['param1__sum']
            point.yndxfood_cnt = yndx_food_cnt_amt['param3__sum']

            # Plain POI counts within the radius.
            point.school_cnt = count_group("schools", origin)
            point.kindergar_cnt = count_group("kindergar", origin)
            point.public_stop_cnt = count_group("stops", origin)
            point.sport_center_cnt = count_group("sport_centers", origin)
            point.pharmacy_cnt = count_group("pharmacies", origin)
            point.supermarket_cnt = count_group("supermarkets", origin)
            point.supermarket_premium_cnt = count_group("supermarkets_premium", origin)
            point.clinic_cnt = count_group("clinics", origin)
            point.bank_cnt = count_group("banks", origin)
            point.reca_cnt = count_group("recas", origin)
            point.lab_cnt = count_group("labs", origin)
            point.culture_cnt = count_group("cultures", origin)
            point.attraction_cnt = count_group("attractions", origin)
            point.mfc_cnt = count_group("public_services", origin)
            point.bc_cnt = count_group("BC", origin)
            point.tc_cnt = count_group("TC", origin)

            point.business_activity = aggregate_group(
                "business_activity", origin,
                param1__sum=Sum('param1'))['param1__sum']

            point.age_day = AGE_DAY_LIMIT
            point.save()

    @staticmethod
    def get_min_distances_to_group(postamat_id: str):
        """Map each pvz/postamat group id to its minimal distance from the point."""
        rows = models.PlacementPointPVZDistance.objects.filter(
            placement_point=postamat_id).values('pvz_postamates_group', 'dist')
        return {row['pvz_postamates_group']: row['dist'] for row in rows}

    @staticmethod
    def update_points_in_radius(qs: models.PlacementPoint, new_status: str):
        """Maintain neighbour counters when points change status.

        A point going to *Installation* (from *Pending*) increments
        ``target_post_cnt`` and snapshots ``prediction_first`` on every point
        within the update radius; reverting to *Cancelled*/*Pending* (from
        *Installation*) decrements the counter. Any effective transition
        re-triggers the ``raschet`` background recalculation.
        """
        radius = Distance(m=DEFAULT_PLACEMENT_POINT_UPDATE_RADIUS)
        triggers = False
        for point in qs:
            if new_status == PointStatus.Installation.name:
                if point.status == PointStatus.Pending.name:
                    models.PlacementPoint.objects.filter(
                        geometry__distance_lt=(point.geometry, radius),
                    ).update(prediction_first=F('prediction_current'),
                             target_post_cnt=F('target_post_cnt') + 1)
                    triggers = True
            elif new_status in (PointStatus.Cancelled.name, PointStatus.Pending.name):
                if point.status == PointStatus.Installation.name:
                    # BUG FIX: the original
                    # `F('target_post_cnt') - 1 if F('target_post_cnt') != 0 else 0`
                    # evaluated the condition in Python, where an F-expression
                    # comparison is always truthy, so the counter decremented
                    # unconditionally and could go negative. Clamp at zero
                    # on the SQL side instead.
                    models.PlacementPoint.objects.filter(
                        geometry__distance_lt=(point.geometry, radius),
                    ).update(target_post_cnt=Greatest(F('target_post_cnt') - 1, 0))
                    triggers = True
            elif (new_status == PointStatus.Working.name
                    and point.status == PointStatus.Pending.name):
                triggers = True
        if triggers:
            raschet.delay()

    @staticmethod
    def update_status(qs: models.PlacementPoint, new_status: str):
        """Set ``new_status`` on every point in *qs*.

        Points transitioning *Installation* -> *Working* additionally get
        ``age_day`` reset and ``start_date`` stamped with the current time.

        BUG FIX: the original called ``qs.update(...)`` on the WHOLE queryset
        inside the per-object loop, so a single Installation point reset
        ``age_day``/``start_date`` for every row (and the bulk update ran
        once per object). Each row is now updated individually.
        """
        now = datetime.datetime.now()
        for q in qs:
            fields = {'status': new_status}
            if (q.status == PointStatus.Installation.name
                    and new_status == PointStatus.Working.name):
                fields.update(age_day=0, start_date=now)
            models.PlacementPoint.objects.filter(pk=q.pk).update(**fields)

    @staticmethod
    def get_point_by_id(point_id: int):
        """Queryset of the point with the given primary key."""
        return models.PlacementPoint.objects.filter(pk=point_id)

    @staticmethod
    def get_point_by_postamat_id(postamat_id: str):
        """Queryset of the point(s) with the given postamat id."""
        return models.PlacementPoint.objects.filter(postamat_id=postamat_id)

    @staticmethod
    def _expand_group_distances(data):
        """Flatten the ``min_distance_to_group`` dict column.

        Replaces it with ``group_N_name`` / ``group_N_category`` /
        ``dist_to_group_N`` columns and returns the widened frame.
        Shared by :meth:`to_excel` and :meth:`to_json`.
        """
        data['min_distance_to_group'] = data['min_distance_to_group'].apply(
            lambda x: list(x.items()))
        new_columns = data.apply(create_columns_dist, axis=1)
        for ind in new_columns.columns:
            expanded = new_columns[ind].apply(pd.Series)
            group = models.Post_and_pvzGroup.objects.get(id=int(expanded.loc[0, 0]))
            expanded[[f"group_{ind + 1}_name", f"group_{ind + 1}_category"]] = (
                group.name, group.category.name)
            expanded = expanded.rename(columns={1: f"dist_to_group_{ind + 1}"})
            expanded = expanded.drop(0, axis=1)
            new_columns = pd.concat([new_columns, expanded], axis=1)
            new_columns = new_columns.drop(ind, axis=1)
        data.drop('min_distance_to_group', axis=1, inplace=True)
        return pd.concat([data, new_columns], axis=1)

    @staticmethod
    def to_excel(serializer):
        """Render serialized placement points as an xlsx workbook; returns raw bytes."""
        data = pd.DataFrame(serializer.data)
        if not data.empty:
            if data['start_date'].any():
                # Excel cannot store tz-aware datetimes; strip the timezone.
                data['start_date'] = data.get('start_date').dt.tz_localize(None)
            if data['sample_trn'].any():
                data['sample_trn'] = data['sample_trn'].astype(int)
            data.rename(columns={'district_id': 'district', 'area_id': 'area'},
                        inplace=True)
            data = PointService._expand_group_distances(data)
        with BytesIO() as b:
            with pd.ExcelWriter(b) as writer:
                data.to_excel(
                    writer, sheet_name='Placement Points',
                    index=False,
                )
            return b.getvalue()

    @staticmethod
    def to_json(serializer):
        """Render serialized placement points as a JSON records string."""
        data = pd.DataFrame(serializer.data)
        data['start_date'] = pd.to_datetime(data['start_date'], errors='coerce')
        data['start_date'] = data['start_date'].dt.tz_localize(None)
        data['sample_trn'] = data['sample_trn'].astype(int)
        # geometry arrives as (lng, lat); note the project's historical
        # "longtitude" key spelling is preserved for API compatibility.
        data['geometry'] = data['geometry'].apply(
            lambda x: {'latitude': x[1], 'longtitude': x[0]})
        data.rename(columns={'district_id': 'district', 'area_id': 'area'},
                    inplace=True)
        data = PointService._expand_group_distances(data)
        return data.to_json(orient='records')

    @staticmethod
    def get_first_10_k():
        """Return the prediction threshold admitting the top 10,000 points.

        BUG FIX: the original indexed ``[10000]``, i.e. the 10,001st row;
        the 10,000th-ranked point is index 9999. Also guards the small-table
        branch against an empty queryset (``first()`` returning None).
        """
        if models.PlacementPoint.objects.count() > 10000:
            qs = models.PlacementPoint.objects.order_by('-prediction_current')
            return qs[9999].prediction_current
        lowest = models.PlacementPoint.objects.order_by('prediction_current').first()
        return lowest.prediction_current if lowest else None
|