Save data into multiple models using Django ORM join
I am facing a problem with Django. I have two models, "Batch" and "BatchYield", each with its own form that users enter data into. Now I am creating another form, "History", in which the user will enter data; in my backend, the data coming from "History" has to be distributed across these two models' tables in the DB.
Now, the payload of a "History" request contains:

- batch_status
- commodity_name
- pesticide
- actual_produce
- acerage
- variety_id
- end_date
This data has to be split and saved across the two models as follows (one possible approach is sketched after these lists):
Batch has the columns:

- batch_status
- commodity_name
- pesticide
- actual_produce
- acerage
- variety_id
and BatchYield has the column:

- end_date
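
A minimal sketch of one way to wire this up, assuming a dedicated endpoint for the "History" form: the names `HistorySerializer` and `HistoryViewSet` are hypothetical, and the `Batch`/`BatchYield` field names are taken from the lists above (they may need mapping to the real model fields, e.g. `variety_id` vs `commodity_variety_id`). The combined payload is validated once, then split across the two models inside a single transaction:

```python
# Hypothetical sketch: split one "History" payload across Batch and BatchYield.
from django.db import transaction
from rest_framework import serializers, status, viewsets
from rest_framework.response import Response

from .models import Batch, BatchYield  # assumed import path


class HistorySerializer(serializers.Serializer):
    batch_status = serializers.CharField()
    commodity_name = serializers.CharField()
    pesticide = serializers.CharField()
    actual_produce = serializers.FloatField()
    acerage = serializers.FloatField()
    variety_id = serializers.IntegerField()
    end_date = serializers.DateField()


class HistoryViewSet(viewsets.ViewSet):
    def create(self, request, *args, **kwargs):
        serializer = HistorySerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        data = serializer.validated_data

        # Everything except end_date belongs to Batch; end_date goes to
        # BatchYield, linked back to the freshly created Batch row.
        batch_fields = {k: v for k, v in data.items() if k != 'end_date'}
        with transaction.atomic():
            batch = Batch.objects.create(**batch_fields)
            BatchYield.objects.create(batch_id=batch, end_date=data['end_date'])

        return Response({"response": "success"}, status=status.HTTP_200_OK)
```

No ORM join is needed for the write path: create the Batch row first, then the BatchYield row pointing at it; `transaction.atomic()` ensures that a failure on the second insert rolls back the first.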
The views of both models look like this:
```python
class BatchViewSet(viewsets.ModelViewSet):
    permission_classes = [permissions.AllowAny]
    serializer_class = BatchSerializer
    queryset = Batch.objects.all()
    http_method_names = ['get', 'post', 'patch', 'delete']
    pagination_class = GeneralPagination
    filterset_fields = ['farm_id', 'batch_status']

    def create(self, request, *args, **kwargs):
        """
        Create/update Batch objects for the provided list based on batch_id.
        """
        # todo to be optimised.
        # decode the Authorization header to get the logged-in user's data
        header = {'Content-Type': 'application/json', 'Authorization': request.headers['Authorization']}
        logged_in_user_data = get_user_data(request.headers['Authorization'])
        # user_id will contain the user id of the logged-in user
        user_id = logged_in_user_data.get('secondary_user_tbl_id')
        for data in request.data:
            data['updated_by_id'] = user_id
            serializer = BatchSerializer(data=data, context={"request": request})
            if serializer.is_valid():
                try:
                    batch_id = data.get('batch_id')
                    farm_id = data.get('farm_id')
                    farm_data = Farm.objects.filter(id=farm_id).values('farmer_id', 'total_acerage')
                    farmer_id = farm_data[0]['farmer_id']
                    request_acerage = float(data['acerage'])
                    # check before casting: total_acerage may be NULL in the DB
                    farm_acerage = farm_data[0]['total_acerage']
                    if farm_acerage in (None, 0):
                        return Response({"error": True, "message": "Add farm first"},
                                        status=status.HTTP_200_OK)
                    farm_acerage = float(farm_acerage)
                    if not batch_id:
                        batch_acerage = Batch.objects.filter(farm_id=farm_id) \
                            .filter(Q(batch_status='running') | Q(batch_status='to_start')) \
                            .aggregate(sum_acerage=Sum('acerage'))['sum_acerage']
                        if batch_acerage in (None, 0):
                            if request_acerage > farm_acerage:
                                return Response(
                                    {"error": True, "message": "Cannot add batch more than farm capacity"},
                                    status=status.HTTP_200_OK)
                        else:
                            batch_acerage = float(batch_acerage) + request_acerage
                            if batch_acerage > farm_acerage:
                                return Response(
                                    {"error": True, "message": "Cannot add batch more than farm capacity"},
                                    status=status.HTTP_200_OK)
                        batch_obj = serializer.save(created_by_id=user_id)
                        batch_id = serializer.data["id"]
                        payload = {
                            "farmer_id": farmer_id,
                            "sub_farmer_id": serializer.data["sub_farmer_id"] or farmer_id,
                            "employee_id": serializer.data["created_by_id"],
                            "batch_id": batch_id,
                            "room_type": "BATCH",
                            "fpo_id": "34",
                            "commodity_name": serializer.data["commodity_name"],
                            "acerage": serializer.data["acerage"],
                            "start_date": serializer.data["start_date"]
                        }
                        response = requests.post(url=settings.CREATE_ROOM_URL, data=json.dumps(payload),
                                                 headers=header)
                        sop_tag_name = '{}_{}'.format(batch_obj.farm_id.region_name,
                                                      serializer.data['commodity_name']).lower().replace(" ", "_")
                        sop_master_list = SOPMaster.objects.filter(sop_tag_name=sop_tag_name).order_by('sequence')
                        if len(sop_master_list) == 0:
                            sop_tag_name = '{}_{}'.format('ALL',
                                                          serializer.data['commodity_name']).lower().replace(" ", "_")
                            sop_master_list = SOPMaster.objects.filter(sop_tag_name=sop_tag_name).order_by('sequence')
                        batch_sop_list = []
                        task_assign_date = None
                        total_days = 0
                        task_day = None
                        for sop_master in sop_master_list:
                            total_days = total_days + sop_master.day
                            if batch_obj.start_date and sop_master.day and sop_master.duration:
                                start_date = datetime.combine(batch_obj.start_date,
                                                              datetime.strptime('1200', '%H%M').time())
                                hours = (sop_master.day * 24) + sop_master.duration
                                str_datetime = start_date + relativedelta(hours=hours)
                                if not task_assign_date:
                                    task_assign_date = batch_obj.start_date.strftime('%Y-%m-%d')
                                    task_day = sop_master.day
                                elif sop_master.day > task_day:
                                    task_day = sop_master.day
                                    task_assign_date = pd.to_datetime(
                                        batch_obj.start_date.strftime('%Y-%m-%d')) + pd.DateOffset(days=task_day - 1)
                            else:
                                str_datetime = None
                            batch_sop_list.append(
                                BatchSOPManagement(task_id=sop_master.task_id, batch_id=batch_obj,
                                                   sop_tag_name=sop_tag_name,
                                                   sequence=sop_master.sequence, day=sop_master.day,
                                                   weightage=sop_master.weightage,
                                                   duration=sop_master.duration, current_status=0,
                                                   due_datetime=str_datetime, task_assign_date=task_assign_date))
                        if batch_obj.start_date:
                            Batch.objects.filter(id=batch_id).update(end_date=str_datetime.strftime('%Y-%m-%d'))
                        BatchSOPManagement.objects.bulk_create(batch_sop_list)
                        solr_delta_import(is_farm=False, model_name="batchsopmanagement")
                    else:
                        batch_current_acerage = Batch.objects.filter(farm_id=farm_id) \
                            .filter(Q(batch_status='running') | Q(batch_status='to_start')) \
                            .exclude(id=batch_id) \
                            .aggregate(sum_acerage=Sum('acerage'))['sum_acerage']
                        if batch_current_acerage:
                            batch_current_acerage = float(batch_current_acerage) + request_acerage
                        else:
                            batch_current_acerage = request_acerage
                        if batch_current_acerage > farm_acerage:
                            return Response(
                                {"error": True, "message": "Cannot add batch more than farm capacity"},
                                status=status.HTTP_200_OK)
                        instance = Batch.objects.get(id=batch_id, farm_id=farm_id)
                        instance.__dict__.update(**serializer.data)
                        instance.save()
                except Exception as e:
                    print(e)
            else:
                continue
        solr_delta_import()
        # todo see how to remove this second validation pass.
        serializer = BatchSerializer(data=request.data, context={"request": request}, many=True)
        if serializer.is_valid():
            return Response({"response": "success"}, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, *args, **kwargs):
        """
        Soft-delete a batch by marking it as aborted.
        """
        try:
            instance = self.get_object()
            instance.batch_status = 'aborted'
            instance.save()
            solr_delta_import()
        except Exception as e:
            return Response({"response": str(e)}, status=status.HTTP_400_BAD_REQUEST)
        return Response({"response": "deleted successfully"}, status=status.HTTP_200_OK)
```
```python
class BatchYieldViewSet(viewsets.ModelViewSet):
    permission_classes = [permissions.AllowAny]
    serializer_class = BatchYieldSerializer
    queryset = BatchYield.objects.all()
    http_method_names = ['get', 'post', 'patch', 'delete']
    pagination_class = GeneralPagination
    filterset_fields = ['batch_id']

    def create(self, request, *args, **kwargs):
        """
        Create/update BatchYield objects for the provided list based on batch_id.
        """
        # decode the Authorization header to get the logged-in user's data
        logged_in_user_data = get_user_data(request.headers['Authorization'])
        # user_id will contain the user id of the logged-in user
        user_id = logged_in_user_data.get('secondary_user_tbl_id')
        for data in request.data:
            data['updated_by_id'] = user_id
            serializer = BatchYieldSerializer(data=data, context={"request": request})
            if serializer.is_valid():
                try:
                    batch_id = data.get('batch_id')
                    yield_id = data.get('id')
                    if not yield_id:
                        serializer.save(created_by_id=user_id)
                    else:
                        instance = BatchYield.objects.get(id=yield_id, batch_id=batch_id)
                        instance.__dict__.update(**serializer.data)
                        instance.save()
                except Exception:
                    pass
            else:
                continue
        solr_delta_import()
        # todo see how to remove this second validation pass.
        serializer = BatchYieldSerializer(data=request.data, context={"request": request}, many=True)
        if serializer.is_valid():
            return Response({"response": "success"}, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, *args, **kwargs):
        """
        Soft-delete a batch yield by marking it inactive.
        """
        try:
            instance = self.get_object()
            instance.is_active = False
            instance.save()
            # solr_delta_import()
        except Exception as e:
            return Response({"response": str(e)}, status=status.HTTP_400_BAD_REQUEST)
        return Response({"response": "deleted successfully"}, status=status.HTTP_200_OK)
```
And the serializer of the Batch model looks like this:
```python
class BatchSerializer(serializers.ModelSerializer):
    """
    Serializer of the Batch model for the GET, POST, PATCH and DELETE methods.
    Request body for POST:
    [{
        "acerage": 75, "batch_health": 75, "farm_id": 6
    }]
    Example response:
    [{
        "id": 14, "start_date": null, "acerage": 75.0, "batch_health": 75, "commodity_id": null,
        "commodity_variety_id": null, "stage": "germination", "farm_id": 6, "expected_delivery_date": null,
        "current_pdd": null, "historic_pdd": null, "current_gdd": null, "historic_gdd": null,
        "sub_farmer_id": null, "batch_status": "to_start", "updated_at": "2021-07-20T06:48:44.027868Z",
        "created_at": "2021-07-20T06:48:44.027896Z", "updated_by_id": 45, "created_by_id": 45
    }]
    """
    farm_id = serializers.PrimaryKeyRelatedField(queryset=Farm.objects.all())
    commodity_id = serializers.IntegerField()
    # commodity_variety_id = serializers.IntegerField()
    commodity_name = serializers.CharField(read_only=True)
    batch_name = serializers.CharField(read_only=True)
    batch_median_health = serializers.IntegerField(read_only=True)

    class Meta:
        model = Batch
        fields = ['id', 'batch_name', 'start_date', 'acerage', 'batch_health', 'commodity_id',
                  'commodity_variety_id', 'stage', 'farm_id', 'expected_delivery_date', 'current_pdd',
                  'historic_pdd', 'current_gdd', 'historic_gdd', 'sub_farmer_id', 'batch_status', 'updated_at',
                  'created_at', 'updated_by_id', 'created_by_id', 'commodity_name', 'historical_yield_per_acre',
                  'expected_produce', 'actual_produce', 'sop_adherence', 'actual_yield_per_acre',
                  'batch_median_health', 'end_date', 'pending_tasks']

    def to_representation(self, instance):
        # drop keys whose value is None from the serialized output
        result = super().to_representation(instance)
        return OrderedDict([(key, result[key]) for key in result if result[key] is not None])
```
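
For reference, a call against the hypothetical `/api/history/` endpoint sketched earlier could look like this (the URL and values are illustrative only):

```python
import json

import requests

payload = {
    "batch_status": "running",
    "commodity_name": "tomato",
    "pesticide": "none",
    "actual_produce": 120.0,
    "acerage": 2.5,
    "variety_id": 7,
    "end_date": "2021-08-01",
}
# POST the combined History payload; the backend splits it into the two tables
resp = requests.post("http://localhost:8000/api/history/",
                     data=json.dumps(payload),
                     headers={"Content-Type": "application/json"})
print(resp.status_code, resp.json())
```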
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow