Added audit FDX files, auto-email sheet changes, and the blockchain certificate HTML
This commit is contained in:
parent f6e5863ffd
commit e49f9e249d

Binary file not shown.
@@ -22,6 +22,7 @@ from django.contrib.auth import get_user_model
 from users.models import UserCredentialsForBlockchain
 from django.core.files.base import File as DjangoFile
 import tempfile
+from MNF import settings
 from io import BytesIO
 from auto_email.views import sendmail
 from lpp.certificate.createCertificate import certificateGenrate
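The added import supplies settings.MEDIA_ROOT for the cleanup paths built later in this commit, e.g. the line that appears in run_audit below:

    media_path = os.path.join(settings.MEDIA_ROOT, "audit_counter_files", script_file_name)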
@@ -177,189 +178,338 @@ def run_audit(msg):
     print("blockchain_obj.publicKey",blockchain_obj.publicKey)
     print("blockchain_obj.privateKey",blockchain_obj.privateKey)
     if status.status == "SUCCESS":
+        if script_ext == "fdx":
+            file_to_audit = File.objects.get(
+                script=script_id,
+                type="script-csv"
+            )
+            hash2 = ""
+            try:
+                file_to_audit_docx = File.objects.get(
+                    script=script_id,
+                    type="script-docx"
+                )
+                script_docx = {}
+                script_path1 = file_to_audit_docx.file.path
+                with open(script_path1, 'rb') as _file:
+                    hash2 = uploadDataToIPFSNode(_file)
+                script_docx["script_file_path"] = script_path1
+                script_docx["script_file"] = hash2
+                script_docx["type"] = "script-docx"
+                audit_data["script-docx"] = script_docx
+            except:
+                csv_script_path = file_to_audit.file.path
+                df = pd.read_csv(csv_script_path)
+                docx = utilities.csv_to_docx(df)
+
+                temp_file_stream = BytesIO()
+                docx.save(temp_file_stream)
+                temp_file_stream.seek(0)
+
+                docx_file = ContentFile(
+                    temp_file_stream.getvalue(),
+                    "from_audited_csv_to_document.docx",
+                )
+
+                query_file = File.objects.create(
+                    script= file_to_audit.script,
+                    file=docx_file,
+                    type="script-docx",
+                )
+                file_to_audit_docx = File.objects.get(
+                    script=script_id,
+                    type="script-docx"
+                )
+                script_docx = {}
+                script_path1 = file_to_audit_docx.file.path
+                script_size = file_to_audit_docx.file.size
+                with open(script_path1, 'rb') as _file:
+                    hash2 = uploadDataToIPFSNode(_file)
+                script_docx["script_file_path"] = script_path1
+                script_docx["script_file"] = hash2
+                script_docx["type"] = "script-docx"
+                audit_data["script-docx"] = script_docx
+
+            ## code for pdf also
+            try:
+                temp_dir = tempfile.TemporaryDirectory()
+                pdf_file_path = utilities.docx_to_pdf(
+                    script_path1, temp_dir.name)
+
+                with open(pdf_file_path, "rb") as temp_pdf:
+
+                    pdf_file = DjangoFile(temp_pdf,pdf_file_path.rsplit('/', 1)[1],)
+
+                    query_file = File.objects.create(
+                        script = file_to_audit.script,
+                        file = pdf_file,
+                        type="script-pdf",
+                    )
+                script_pdf = {}
+                script_path1 = pdf_file_path
+                # script_size = file_to_audit_docx.file.size
+                with open(script_path1, 'rb') as _file:
+                    hash2 = uploadDataToIPFSNode(_file)
+                script_pdf["script_file_path"] = script_path1
+                script_pdf["script_file"] = hash2
+                script_pdf["type"] = "script-pdf"
+                audit_data["script-pdf"] = script_pdf
+            except:
+                pass
+            # convert csv to json and store JSON
+            try:
+                csv_script_path = file_to_audit.file.path
+                print("csv_path fetched",csv_script_path)
+                df = pd.read_csv(csv_script_path)
+                df = df.loc[:, ["content", "script_element"]]
+                script_json: dict = json.loads(utilities.csv_to_json(df))
+                with tempfile.TemporaryDirectory() as temp_dir:
+                    print("Temporary directory created:", temp_dir)
+                    temp_filename = os.path.join(temp_dir, 'script_json_file.json')
+                    print("temp file name ----------------?>",temp_filename)
+                    with open(temp_filename, 'w') as json_file:
+                        json.dump(script_json, json_file, indent=4)
+                    script_json = {}
+                    script_path1 = temp_filename
+                    # script_size = file_to_audit_docx.file.size
+                    with open(script_path1, 'rb') as _file:
+                        hash2 = uploadDataToIPFSNode(_file)
+                    script_json["script_file_path"] = script_path1
+                    script_json["script_file"] = hash2
+                    script_json["type"] = "script-json"
+                    audit_data["script-json"] = script_json
+                    print("data_uploaded")
+            except Exception as exp:
+                print("###ERROR:",exp)
+                print("######Error from JSON CREATION############")
+            try:
+                script_csv = {}
+                script_path = file_to_audit.file.path
+                print("script_file_path_is_here",script_path)
+                with open(script_path, 'rb') as _file:
+                    hash1 = uploadDataToIPFSNode(_file)
+                script_csv["script_file"] = hash1
+                script_csv["script_file_path"] = script_path
+                script_csv["type"] = "script-csv"
+                audit_data["script-csv"]= script_csv
+            except Exception as exp:
+                print(exp)
+            Response,gasprice = UploadScriptAuditData(OWNER_KEY,blockchain_obj.publicKey,blockchain_obj.user_id,script_id,str(audit_data))
+            print("tx_hash",Response)
+            transactioni_id = str(Response)
+            status.transaction_hash =str(transactioni_id)
+            status.save()
+            to_email = [user.email]
+            print("####### #### to_email",to_email)
+            key_value_aud = { "script_name" : str(screenplay_name) }
+            email_code = 'SB1'
+            sendmail(to_email=to_email , email_code=email_code, key_value = key_value_aud )
+            print("$$$$### after sendmail")
+            # user_infos = user_info(tx_hash=Response,service="Script Audit",gas_fee=gasprice)
+            # addition_result = user_infos.update_info(request)
+            hash2 = hash_decrypation(hash2)
+            tx_id = Response
+            certificate = certificateGenrate(user.username,"script audit",tx_id,projectname=script_file_name,matic=gasprice)
+            to_email = [user.email]
+            email_code = 'BL1'
+            key_value = {
+                "service":"Audited Script",
+                "hash": hash2,
+                "public key":blockchain_obj.publicKey,
+                "Transaction Hash": tx_id,
+            }
+            print("userkey = ", userkey)
+            sendmail(to_email=to_email , email_code=email_code, key_value=key_value, filePath=certificate)
+            print("mail send sucessfully:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
+            data = {"message": "Success"}
+
+        else:
             file_to_audit = File.objects.get(
                 script=script_id,
                 type="script-csv"
             )
             file_to_audit_report = File.objects.get(
                 script=script_id,
                 type="audit-report"
             )
             hash2 = ""
             try:
                 file_to_audit_docx = File.objects.get(
                     script=script_id,
                     type="script-docx"
                 )
                 script_docx = {}
                 script_path1 = file_to_audit_docx.file.path
                 with open(script_path1, 'rb') as _file:
                     hash2 = uploadDataToIPFSNode(_file)
                 script_docx["script_file_path"] = script_path1
                 script_docx["script_file"] = hash2
                 script_docx["type"] = "script-docx"
                 audit_data["script-docx"] = script_docx
             except:
                 csv_script_path = file_to_audit.file.path
                 df = pd.read_csv(csv_script_path)
                 docx = utilities.csv_to_docx(df)
 
                 temp_file_stream = BytesIO()
                 docx.save(temp_file_stream)
                 temp_file_stream.seek(0)
 
                 docx_file = ContentFile(
                     temp_file_stream.getvalue(),
                     "from_audited_csv_to_document.docx",
                 )
 
                 query_file = File.objects.create(
                     script= file_to_audit.script,
                     file=docx_file,
                     type="script-docx",
                 )
                 file_to_audit_docx = File.objects.get(
                     script=script_id,
                     type="script-docx"
                 )
                 script_docx = {}
                 script_path1 = file_to_audit_docx.file.path
                 script_size = file_to_audit_docx.file.size
                 with open(script_path1, 'rb') as _file:
                     hash2 = uploadDataToIPFSNode(_file)
                 script_docx["script_file_path"] = script_path1
                 script_docx["script_file"] = hash2
                 script_docx["type"] = "script-docx"
                 audit_data["script-docx"] = script_docx
 
             ## code for pdf also
             try:
                 temp_dir = tempfile.TemporaryDirectory()
                 pdf_file_path = utilities.docx_to_pdf(
                     script_path1, temp_dir.name)
 
                 with open(pdf_file_path, "rb") as temp_pdf:
 
                     pdf_file = DjangoFile(temp_pdf,pdf_file_path.rsplit('/', 1)[1],)
 
                     query_file = File.objects.create(
                         script = file_to_audit.script,
                         file = pdf_file,
                         type="script-pdf",
                     )
                 script_pdf = {}
                 script_path1 = pdf_file_path
                 # script_size = file_to_audit_docx.file.size
                 with open(script_path1, 'rb') as _file:
                     hash2 = uploadDataToIPFSNode(_file)
                 script_pdf["script_file_path"] = script_path1
                 script_pdf["script_file"] = hash2
                 script_pdf["type"] = "script-pdf"
                 audit_data["script-pdf"] = script_pdf
             except:
                 pass
             # convert csv to json and store JSON
             try:
                 csv_script_path = file_to_audit.file.path
                 df = pd.read_csv(csv_script_path)
                 df = df.loc[:, ["content", "script_element"]]
                 script_json: dict = json.loads(utilities.csv_to_json(df))
                 with tempfile.TemporaryDirectory() as temp_dir:
                     print("Temporary directory created:", temp_dir)
                     temp_filename = os.path.join(temp_dir, 'script_json_file.json')
                     print("temp file name ----------------?>",temp_filename)
                     with open(temp_filename, 'w') as json_file:
                         json.dump(script_json, json_file, indent=4)
                     script_json = {}
                     script_path1 = temp_filename
                     # script_size = file_to_audit_docx.file.size
                     with open(script_path1, 'rb') as _file:
                         hash2 = uploadDataToIPFSNode(_file)
                     script_json["script_file_path"] = script_path1
                     script_json["script_file"] = hash2
                     script_json["type"] = "script-json"
                     audit_data["script-json"] = script_json
                     print("data_uploaded")
             except Exception as exp:
                 print("###ERROR:",exp)
                 print("######Error from JSON CREATION############")
                 pass
 
             script_csv = {}
             audit_report ={}
             audit_report_path = file_to_audit_report.file.path
             script_path = file_to_audit.file.path
             script_size = file_to_audit.file.size
 
             print("script_file_path_is_here",script_path)
             with open(script_path, 'rb') as _file:
                 hash1 = uploadDataToIPFSNode(_file)
             script_csv["script_file"] = hash1
             script_csv["script_file_path"] = script_path
             script_csv["type"] = "script-csv"
             with open(audit_report_path, 'rb') as file1:
                 hash2 = uploadDataToIPFSNode(file1)
             audit_report["script_file"] = hash2
             audit_report["script_file_path"] = audit_report_path
             audit_report["type"] = "audit-report"
             audit_data["script-csv"]= script_csv
             audit_data["audit-report"]= audit_report
 
             Response,gasprice = UploadScriptAuditData(OWNER_KEY,blockchain_obj.publicKey,blockchain_obj.user_id,script_id,str(audit_data))
             print("tx_hash",Response)
             transactioni_id = str(Response)
             status.transaction_hash =str(transactioni_id)
             status.save()
 
             to_email = [user.email]
             email_code = 'SB1'
             key_value_aud = { "script_name" : str(screenplay_name)}
             sendmail(to_email=to_email , email_code=email_code, key_value = key_value_aud)
 
             # user_infos = user_info(tx_hash=Response,service="Script Audit",gas_fee=gasprice)
             # addition_result = user_infos.update_info(request)
             hash2 = hash_decrypation(hash2)
             tx_id = Response
             certificate = certificateGenrate(user.username,"script audit",tx_id,projectname=script_file_name,matic=gasprice)
             to_email = [user.email]
             email_code = 'BL1'
             key_value = {
                 "service":"Audited Script",
                 "hash": hash2,
                 "public key":blockchain_obj.publicKey,
                 "Transaction Hash": tx_id,
             }
             print("userkey = ", userkey)
             sendmail(to_email=to_email , email_code=email_code, key_value=key_value, filePath=certificate)
             print("mail send sucessfully:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
             data = {"message": "Success"}
             # deleting the folder and files
 
             # Delete a file
             media_path = os.path.join(settings.MEDIA_ROOT, "audit_counter_files", script_file_name)
             if os.path.exists(media_path):
                 os.remove(media_path)
                 print(f"File '{media_path}' deleted successfully.")
             else:
                 print(f"File '{media_path}' does not exist.")
 
             # Delete a folder and its contents
             folder1_path = os.path.join(settings.MEDIA_ROOT, "scripts_folder", script_id)
             if os.path.exists(folder1_path):
                 shutil.rmtree(folder1_path)
                 print(f"Folder '{folder1_path}' and its contents deleted successfully.")
             else:
                 print(f"Folder '{folder1_path}' does not exist.")
 
             folder2_path = os.path.join(settings.MEDIA_ROOT, "audit_folder", script_id)
             if os.path.exists(folder2_path):
                 shutil.rmtree(folder2_path)
                 print(f"Folder '{folder2_path}' and its contents deleted successfully.")
             else:
                 print(f"Folder '{folder2_path}' does not exist.")
 
             return JsonResponse(data, status=200)
             # return Response("Success", status=200)
 
     else:
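Both the new "fdx" branch and the carried-over else branch repeat one step: open a file, push it to IPFS, and record its path, hash, and type under a key in audit_data. A condensed sketch of that recurring step (the helper name record_artifact is hypothetical, not part of this commit; uploadDataToIPFSNode is assumed to accept an open binary file and return a content hash, which is how the code above calls it):

    def record_artifact(audit_data, path, kind, upload_fn):
        # Hash the file into IPFS and record it under audit_data[kind];
        # kind is one of the labels used above: "script-docx", "script-pdf",
        # "script-json", "script-csv", "audit-report".
        with open(path, 'rb') as fh:
            file_hash = upload_fn(fh)
        audit_data[kind] = {
            "script_file_path": path,
            "script_file": file_hash,
            "type": kind,
        }
        return file_hash

    # usage matching the blocks above, e.g.:
    # record_artifact(audit_data, script_path1, "script-docx", uploadDataToIPFSNode)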
@@ -19,6 +19,7 @@ from lpp.models import LPPTASKDatabase
 
 from centralizePayment.views import auto_refund
 from juggernaut.views import update_juggernaut
+from juggernaut.models import JuggernautPackage
 from auto_email.views import sendmail
 from lpp.views import task_assigner
 from utils import filesystem, utilities
@@ -435,36 +436,40 @@ class Conversion:
             translation_id=self.translated_script_object.translation_id)
 
     """Juggernaut Payment Updation"""
+    try:
         if self.juggernaut_pages_deduction:
             user_data = JuggernautPackage.objects.filter(user_id = self.user)
             if user_data.count() == 1:
                 pass
             else:
                 self.session = {}
                 try:
                     if user_data.count() > 1:
                         max_beat = 0
                         for i in user_data:
                             if i.conversion_pages >= max_beat:
                                 user_data = i
                         self.session['user_data'] = str(user_data.id)
-                except:
+                except Exception as e:
+                    print("Juggernaut error is",e)
                     max_beat = 0
 
             self.session['user_data'] = str(user_data.id)
             update_juggernaut(self,
                               user_id=self.user.id,
                               service_name="conversion",
                               conversion_pages=self.juggernaut_pages_deduction,
                               associated_project=self.original_script_object,
                               translation_language=self.language_set)
             self.original_script_object.is_juggernaut_used = True
             # if script_original.languages_juggernaut != "":
             # #     script_original.languages_juggernaut = str(script_original.languages_juggernaut) + "," + str(
             # #         script_translated.dial_dest_language)
             # else:
             #     script_original.languages_juggernaut = str(script_translated.dial_dest_language)
             self.original_script_object.save()
+    except Exception as e:
+        print("Error in juggernaut updation is",e)
     """Juggernaut Payment Updation Done"""
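Note that the selection loop keeps reassigning user_data whenever i.conversion_pages >= max_beat, but max_beat itself is never updated inside the loop, so the comparison stays against 0 and the loop effectively keeps the last package rather than the largest. If the intent is "pick the package with the most conversion pages", a compact equivalent would be (sketch only; user_data is the queryset filtered above, non-empty under the count() > 1 guard):

    # pick the JuggernautPackage with the largest conversion_pages
    best = max(user_data, key=lambda pkg: pkg.conversion_pages)
    self.session['user_data'] = str(best.id)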
@@ -15,14 +15,14 @@
 
 <body style="margin: 0; padding: 0; box-sizing: border-box; font-family: 'Poppins', sans-serif;font-size: 16px;">
     <div style="max-width: 90vw; box-shadow: 0px 4px 27px 0px #00000040; margin: 16px auto;">
-        <img src="/home/mnfidea/mnf/MNF/app/static/media/HeaderImageCertificate.png" style="width: 100%;">
+        <img src="/home/ubuntu/Conversion_Kitchen_Code/kitchen_counter/media/HeaderImageCertificate.png" style="width: 100%;">
         <table style="border-collapse: collapse;
             display: flex;
             justify-content: center;
             margin: 0px 0 50px 0px">
             <tr>
                 <td rowspan="4" style="text-align: center;">
-                    <img src="/home/mnfidea/mnf/MNF/app/static/media/verticalLineGolden.png" alt="v-line" style="height: 300px;
+                    <img src="/home/ubuntu/Conversion_Kitchen_Code/kitchen_counter/media/verticalLineGolden.png" alt="v-line" style="height: 300px;
                     width: 30px;
                     position: relative;
                     top: -70px; padding: 0 20px;">

@@ -36,7 +36,7 @@
             </h2>
         </td>
         <td rowspan="4" style="text-align: center;">
-            <img src="/home/mnfidea/mnf/MNF/app/static/media/verticalLineGolden.png" alt="v-line" style="height: 300px;
+            <img src="/home/ubuntu/Conversion_Kitchen_Code/kitchen_counter/media/verticalLineGolden.png" alt="v-line" style="height: 300px;
             width: 30px;
             position: relative;
             top: 70px; padding: 0 20px;">

@@ -124,7 +124,7 @@
         <tr>
             <td style="padding-left: 90px ; display: flex; justify-content: space-evenly;">
                 <div style="margin-left: -100px;">
-                    <img src="/home/mnfidea/mnf/MNF/app/static/media/DoogleFilm.png" alt="doodle" style="width: 144.7px;
+                    <img src="/home/ubuntu/Conversion_Kitchen_Code/kitchen_counter/media/DoogleFilm.png" alt="doodle" style="width: 144.7px;
                     height: 121.06px;">
                 </div>
                 <div>
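These hunks swap the hard-coded /home/mnfidea/... image paths for the new deployment's /home/ubuntu/Conversion_Kitchen_Code/kitchen_counter/media/ prefix. Since the certificate template is rendered server-side, the prefix could instead be derived from Django settings so the HTML survives the next server move (a sketch, assuming these images live under MEDIA_ROOT; the variable name header_img is hypothetical):

    import os
    from django.conf import settings

    # build the path once and pass it into the template context
    # instead of embedding a literal filesystem path in the HTML
    header_img = os.path.join(settings.MEDIA_ROOT, "HeaderImageCertificate.png")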