Conversion_Kitchen_Code/kitchen_counter/conversion/management/commands/custom_runserver.py

import socket
import time
import boto3
import subprocess
import json
import os
import shutil
import tempfile
import threading
from io import BytesIO

import pandas as pd

from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.files.base import ContentFile, File as DjangoFile
from django.core.management import call_command
from django.core.management.base import BaseCommand
from django.http import JsonResponse

from mnfapp.models import ScriptTranslations
from MNF.settings import BasePath
from conversion.translation.external_conversion import Conversion
from centralisedFileSystem.models import Script, File
from scriptAudit.models import ScriptAuditModel, States
from scriptAudit.utils import update_audit_status
from scriptAudit.mnf_script_audit import NeutralAudit
from users.models import UserCredentialsForBlockchain
from utils import filesystem, utilities
from auto_email.views import sendmail
from lpp.certificate.createCertificate import certificateGenrate

basePath = BasePath()
User = get_user_model()

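
# Handlers for messages pulled off the SQS queue by the Command at the bottom
# of this file.
#
# run_conversion expects a message body roughly shaped like the following
# (illustrative only, inferred from the keys read below):
#   {"service_type": "conversion", "translation_id": "...", "sample_id": null,
#    "iteration_no": null, "juggernaut_pages_deduction": null,
#    "language_set": null, "amount_without_subscrption": null}
# It downloads the source script from S3, runs the Conversion in a background
# thread, and deletes the S3 object once the conversion has finished.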
def background_execution(obj):
    obj.convert()


def run_conversion(msg):
    body_dict = json.loads(msg.body)
    msg.delete()
    translated_script = ScriptTranslations.objects.get(translation_id=body_dict["translation_id"])
    object_key = "INPUT/" + (translated_script.script_link_id.script.name.split("/"))[-1]
    local_file_path = "/home/ubuntu/Conversion_Kitchen_Code/kitchen_counter/media/" + translated_script.script_link_id.script.name
    s3_client = boto3.client(
        's3',
        aws_access_key_id="AKIAQVLBBGCB45RMLKVW",
        aws_secret_access_key="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",
        region_name="ap-south-1",
    )
    s3_client.download_file("conversion-kitchen", object_key, local_file_path)
    conversion_params = {
        "user": translated_script.user_id,
        "file_path": str(basePath) + "/media/" + translated_script.script_link_id.script.name,
        "original_script_id": translated_script.script_link_id.script_id,
        "translated_script_id": translated_script.translation_id,
        "sample_id": body_dict.get('sample_id', None),
        "existing_script": body_dict.get('sample_id', None),
        "iteration_no": body_dict.get('iteration_no', None),
        "juggernaut_pages_deduction": body_dict.get("juggernaut_pages_deduction", None),
        "language_set": body_dict.get("language_set", None),
        "amount_without_subscrption": body_dict.get("amount_without_subscrption", None),
    }
    print("reached here")
    obj = Conversion(**conversion_params)
    # obj.convert()
    background_thread = threading.Thread(target=background_execution, args=(obj,))
    background_thread.start()
    background_thread.join()
    s3_client.delete_object(Bucket='conversion-kitchen', Key=object_key)

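
# run_audit expects a message body roughly shaped like the following
# (illustrative only, inferred from the keys read below):
#   {"service_type": "audit", "user": "<username>", "s3-file-path": "...",
#    "screenplay_name": "...", "author": "...", "language": "...",
#    "script_ext": "...", "script_file_name": "..."}
# It downloads the script from S3, registers it in the central file system,
# runs the neutral audit, uploads the resulting artifacts to IPFS, records the
# audit on the blockchain, emails the user, and cleans up the local files.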
def run_audit(msg):
    from Blockchain2.decryption import decryptionOfPrivate, decryptionOfUrl, download_file_System, decryptionOflocalUrl, hash_decrypation
    from Blockchain2.DataStorage import uploadDataToIPFSNode
    from Blockchain2.scriptAudit import getUserprojectIds, UploadScriptAuditData, getScriptAudit
    from Blockchain2.blockchainsetting import OWNER_KEY

    print("Hello World inside ")
    body_dict = json.loads(msg.body)
    msg.delete()

    user = body_dict.get("user")
    s3_url = body_dict.get("s3-file-path")
    screenplay_name = body_dict.get("screenplay_name")
    author = body_dict.get("author")
    language = body_dict.get("language")
    script_ext = body_dict.get("script_ext")
    script_file_name = body_dict.get("script_file_name")
    language = "en"
    print("112")

    object_key = "INPUT/" + str(script_file_name)
    local_file_path = "/home/ubuntu/Conversion_Kitchen_Code/kitchen_counter/media/audit_counter_files/" + script_file_name
    s3_client = boto3.client(
        's3',
        aws_access_key_id="AKIAQVLBBGCB45RMLKVW",
        aws_secret_access_key="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",
        region_name="ap-south-1",
    )
    print(object_key, local_file_path)
    s3_client.download_file("conversion-kitchen", object_key, local_file_path)
    print("113")

    with open(local_file_path, 'rb') as file:
        file_content = file.read()
    file = ContentFile(
        file_content,
        script_file_name,
    )

    user = User.objects.get(username=user)

    result = filesystem.new_screenplay_without_audit_in_background(
        user,
        author,
        screenplay_name,
        file,
        "script-original",
        language,
    )
    script_id = result.get("script", {}).get("id")
    file_to_original = File.objects.get(
        script=script_id,
        type="script-original",
    )
    try:
        update_audit_status(script_id, States.STARTED)
    except Exception:
        update_audit_status(script_id, States.FAILURE)
    try:
        audit = NeutralAudit(script_id)
        audit.audit_in_background()
        ScriptAuditModel.objects.update_or_create(
            script=Script.objects.get(id=script_id),
            defaults={"status": "SUCCESS"},
        )
    except Exception:
        ScriptAuditModel.objects.update_or_create(
            script=Script.objects.get(id=script_id),
            defaults={"status": "FAILURE"},
        )

    status = ScriptAuditModel.objects.get(
        script=Script.objects.get(id=script_id)
    )
    print("STATUS AUDIT", status)

    # Blockchain
    # if UserCredentialsForBlockchain.objects.filter(user=request.user).exists():
    blockchain_obj = UserCredentialsForBlockchain.objects.get(user=user)
    script_original = {}
    audit_data = {}
    script_original["status"] = "STARTED"
    script_original["script_id"] = script_id
    with open(file_to_original.file.path, 'rb') as file:
        hash = uploadDataToIPFSNode(file)
    script_original["script_file"] = hash
    script_original["type"] = "script-original"
    script_original["script_file_path"] = file_to_original.file.path
    audit_data["script-original"] = script_original
    userkey = decryptionOfPrivate(blockchain_obj.privateKey)
    print("userkey = ", str(userkey))
    print("blockchain_obj.publicKey", blockchain_obj.publicKey)
    print("blockchain_obj.privateKey", blockchain_obj.privateKey)
    if status.status == "SUCCESS":
        file_to_audit = File.objects.get(
            script=script_id,
            type="script-csv",
        )
        file_to_audit_report = File.objects.get(
            script=script_id,
            type="audit-report",
        )
        hash2 = ""
        try:
            file_to_audit_docx = File.objects.get(
                script=script_id,
                type="script-docx",
            )
            script_docx = {}
            script_path1 = file_to_audit_docx.file.path
            with open(script_path1, 'rb') as _file:
                hash2 = uploadDataToIPFSNode(_file)
            script_docx["script_file_path"] = script_path1
            script_docx["script_file"] = hash2
            script_docx["type"] = "script-docx"
            audit_data["script-docx"] = script_docx
        except Exception:
            # No docx on record: rebuild one from the audited CSV
            csv_script_path = file_to_audit.file.path
            df = pd.read_csv(csv_script_path)
            docx = utilities.csv_to_docx(df)
            temp_file_stream = BytesIO()
            docx.save(temp_file_stream)
            temp_file_stream.seek(0)
            docx_file = ContentFile(
                temp_file_stream.getvalue(),
                "from_audited_csv_to_document.docx",
            )
            query_file = File.objects.create(
                script=file_to_audit.script,
                file=docx_file,
                type="script-docx",
            )
            file_to_audit_docx = File.objects.get(
                script=script_id,
                type="script-docx",
            )
            script_docx = {}
            script_path1 = file_to_audit_docx.file.path
            script_size = file_to_audit_docx.file.size
            with open(script_path1, 'rb') as _file:
                hash2 = uploadDataToIPFSNode(_file)
            script_docx["script_file_path"] = script_path1
            script_docx["script_file"] = hash2
            script_docx["type"] = "script-docx"
            audit_data["script-docx"] = script_docx
            ## code for pdf also
            try:
                temp_dir = tempfile.TemporaryDirectory()
                pdf_file_path = utilities.docx_to_pdf(
                    script_path1, temp_dir.name)
                with open(pdf_file_path, "rb") as temp_pdf:
                    pdf_file = DjangoFile(temp_pdf, pdf_file_path.rsplit('/', 1)[1])
                    query_file = File.objects.create(
                        script=file_to_audit.script,
                        file=pdf_file,
                        type="script-pdf",
                    )
                script_pdf = {}
                script_path1 = pdf_file_path
                # script_size = file_to_audit_docx.file.size
                with open(script_path1, 'rb') as _file:
                    hash2 = uploadDataToIPFSNode(_file)
                script_pdf["script_file_path"] = script_path1
                script_pdf["script_file"] = hash2
                script_pdf["type"] = "script-pdf"
                audit_data["script-pdf"] = script_pdf
            except Exception:
                pass

        # convert csv to json and store JSON
        try:
            csv_script_path = file_to_audit.file.path
            df = pd.read_csv(csv_script_path)
            df = df.loc[:, ["content", "script_element"]]
            script_json: dict = json.loads(utilities.csv_to_json(df))
            with tempfile.TemporaryDirectory() as temp_dir:
                print("Temporary directory created:", temp_dir)
                temp_filename = os.path.join(temp_dir, 'script_json_file.json')
                print("temp file name ----------------?>", temp_filename)
                with open(temp_filename, 'w') as json_file:
                    json.dump(script_json, json_file, indent=4)
                script_json = {}
                script_path1 = temp_filename
                # script_size = file_to_audit_docx.file.size
                with open(script_path1, 'rb') as _file:
                    hash2 = uploadDataToIPFSNode(_file)
                script_json["script_file_path"] = script_path1
                script_json["script_file"] = hash2
                script_json["type"] = "script-json"
                audit_data["script-json"] = script_json
                print("data_uploaded")
        except Exception as exp:
            print("###ERROR:", exp)
            print("######Error from JSON CREATION############")
            pass
        script_csv = {}
        audit_report = {}
        audit_report_path = file_to_audit_report.file.path
        script_path = file_to_audit.file.path
        script_size = file_to_audit.file.size
        print("script_file_path_is_here", script_path)
        with open(script_path, 'rb') as _file:
            hash1 = uploadDataToIPFSNode(_file)
        script_csv["script_file"] = hash1
        script_csv["script_file_path"] = script_path
        script_csv["type"] = "script-csv"
        with open(audit_report_path, 'rb') as file1:
            hash2 = uploadDataToIPFSNode(file1)
        audit_report["script_file"] = hash2
        audit_report["script_file_path"] = audit_report_path
        audit_report["type"] = "audit-report"

        audit_data["script-csv"] = script_csv
        audit_data["audit-report"] = audit_report

        Response, gasprice = UploadScriptAuditData(
            OWNER_KEY,
            blockchain_obj.publicKey,
            blockchain_obj.user_id,
            script_id,
            str(audit_data),
        )
        print("tx_hash", Response)
        transaction_id = str(Response)
        status.transaction_hash = str(transaction_id)
        status.save()

        to_email = [user.email]
        email_code = 'SB1'
        key_value_aud = {"script_name": str(screenplay_name)}
        sendmail(to_email=to_email, email_code=email_code, key_value=key_value_aud)

        # user_infos = user_info(tx_hash=Response, service="Script Audit", gas_fee=gasprice)
        # addition_result = user_infos.update_info(request)
        hash2 = hash_decrypation(hash2)
        tx_id = Response
        certificate = certificateGenrate(user.username, "script audit", tx_id, projectname=script_file_name, matic=gasprice)
        to_email = [user.email]
        email_code = 'BL1'
        key_value = {
            "service": "Audited Script",
            "hash": hash2,
            "public key": blockchain_obj.publicKey,
            "Transaction Hash": tx_id,
        }
        print("userkey = ", userkey)
        sendmail(to_email=to_email, email_code=email_code, key_value=key_value, filePath=certificate)
        print("mail sent successfully:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
        data = {"message": "Success"}

        # Clean up the uploaded file and the generated folders
        # Delete the uploaded audit file
        media_path = os.path.join(settings.MEDIA_ROOT, "audit_counter_files", script_file_name)
        if os.path.exists(media_path):
            os.remove(media_path)
            print(f"File '{media_path}' deleted successfully.")
        else:
            print(f"File '{media_path}' does not exist.")
        # Delete the script folders and their contents
        folder1_path = os.path.join(settings.MEDIA_ROOT, "scripts_folder", script_id)
        if os.path.exists(folder1_path):
            shutil.rmtree(folder1_path)
            print(f"Folder '{folder1_path}' and its contents deleted successfully.")
        else:
            print(f"Folder '{folder1_path}' does not exist.")
        folder2_path = os.path.join(settings.MEDIA_ROOT, "audit_folder", script_id)
        if os.path.exists(folder2_path):
            shutil.rmtree(folder2_path)
            print(f"Folder '{folder2_path}' and its contents deleted successfully.")
        else:
            print(f"Folder '{folder2_path}' does not exist.")
        return JsonResponse(data, status=200)
        # return Response("Success", status=200)
    else:
        to_email = [user.email]
        email_code = 'SB2'
        key_value_aud = {"script_name": str(screenplay_name)}
        sendmail(to_email=to_email, email_code=email_code, key_value=key_value_aud)

        Response, gasprice = UploadScriptAuditData(
            OWNER_KEY,
            blockchain_obj.publicKey,
            blockchain_obj.user_id,
            script_id,
            str(audit_data),
        )
        print("tx_hash", Response)
        hash = hash_decrypation(hash)
        # certificate = certificateGenrate(request.user.username, "script audit", hash)
        tx_id = Response
        # certificate = certificateGenrate(request.user.username, "script audit", tx_id, projectname=script_file_name, matic=gasprice)
        to_email = [user.email]
        email_code = 'BL1'
        key_value = {
            "service": "Original Script Audit",
            "hash": hash,
            "public key": blockchain_obj.publicKey,
            "private key": userkey,
            "Transaction Hash": tx_id,
        }
        certificate = certificateGenrate(user.username, "script audit", tx_id, projectname=script_file_name, matic=gasprice)
        sendmail(to_email=to_email, email_code=email_code, key_value=key_value, filePath=certificate)
        print("::::::::::::::", key_value)
        print("userkey = ", userkey)

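
# Management command: start the Django dev server in a subprocess, wait until
# the port accepts connections, then poll the "mnfqueue" SQS queue and route
# each message to run_conversion or run_audit based on its "service_type".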
class Command(BaseCommand):
    help = 'Custom Command to start django server and then start dequeuing from SQS'

    def handle(self, *args, **options):
        # Call the original runserver command
        # call_command('runserver', '0.0.0.0:4549')
        command = 'python manage.py runserver 0.0.0.0:4549 > /home/ubuntu/Conversion_Kitchen_Code/logfile.log 2>&1'
        # Execute the command using subprocess.Popen
        subprocess.Popen(command, shell=True)
        # Wait for the server to start
        while True:
            try:
                # Try to connect to the server
                with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                    s.connect(('0.0.0.0', 4549))
                break
            except ConnectionRefusedError:
                # If connection is refused, wait and try again
                time.sleep(1)
        # Once the server is up, start the SQS polling loop
        self.run_after_server_startup()

    def run_after_server_startup(self):
        # Your function to run after the server starts
        print("Development server is fully up and running! Run your function here.")
        session = boto3.Session(
            aws_access_key_id='AKIAQVLBBGCB45RMLKVW',  # replace with your key
            aws_secret_access_key='ZWc6KOc5LuBLuCEBDDfQTor+Q7rp3fFH74gVt+AA',  # replace with your key
        )
        sqs = session.resource('sqs', region_name='ap-south-1')
        print("Started executing this from conversion")
        queue = sqs.get_queue_by_name(QueueName="mnfqueue")
        # try:
        while True:
            messages = queue.receive_messages(
                MessageAttributeNames=["All"],
                MaxNumberOfMessages=1,
                WaitTimeSeconds=5,
            )
            if len(messages) > 0:
                for msg in messages:
                    try:
                        print("Received message: %s: %s" % (msg.message_id, msg.body))
                        print(type(msg.body))
                        body_dict = json.loads(msg.body)
                        if body_dict['service_type'] == "conversion":
                            run_conversion(msg)
                        elif body_dict['service_type'] == "audit":
                            run_audit(msg)
                    except Exception as error:
                        print("error execution from queue: %s" % error)
            else:
                break

        print("Completed All Execution")