openai audit changes
This commit is contained in:
parent
3ab7d8efcc
commit
97f5af0a48
|
@ -0,0 +1,77 @@
|
|||
import csv
|
||||
import pandas as pd
|
||||
import os
|
||||
from openai import OpenAI
|
||||
import sys
|
||||
|
||||
|
||||
|
||||
|
||||
def classify_lines(input_file_path, audit_ai_csv) -> pd.DataFrame:
    """Label every line of a script via the OpenAI API, then clean the result.

    Reads ``input_file_path``, sends its non-empty lines to the model in
    chunks of 20, writes the raw (content, script_element) rows to
    ``audit_ai_csv``, runs the post-processing pipeline over that CSV, and
    returns the cleaned DataFrame.

    Fixes vs. previous revision:
    - return annotation corrected to ``pd.DataFrame`` (the function has
      always returned a DataFrame, never ``list[str]``);
    - removed the unused ``prompt`` / ``final_prompt`` dead code — the
      request has always been built from the developer/user messages below.
    """
    # Imported inside the function, presumably to avoid an import cycle
    # with scriptAudit.utils — TODO confirm before hoisting to module level.
    from scriptAudit.utils import remove_empty_content, remove_leading_numbers, remove_numeric_only_content, remove_emptyline_rows, merge_consecutive_action_lines, merge_consecutive_dialogue_lines
    from scriptAudit.utils import insert_blank_lines, add_fade_in_out, remove_asterisks, merge_consecutive_action_lines_new, extract_labeled_lines, remove_trailing_speaker

    with open(input_file_path, "r") as f:
        raw_lines = [line.strip() for line in f.readlines() if line.strip()]

    chunked_results = []
    client = OpenAI(api_key=os.getenv('openai_key'))

    # Classify in chunks of 20 lines to keep each request small.
    for i in range(0, len(raw_lines), 20):
        chunk = raw_lines[i:i + 20]
        response = client.responses.create(
            model="gpt-4o",
            input=[
                {
                    "role": "developer",
                    "content": """You are a screenplay auditor. For each line below, classify it using one of these labels:
slugline, speaker, dialogue, action, parenthetical, transition, special_term, title. Return each line followed by its label in curly braces.\n\n
**Examples:**
INT. ROOM – NIGHT {slugline}
KITCHEN – DAY {slugline}
JOHN {speaker}
(quietly) {parenthetical}
JOHN (O.S.) {speaker}
JOHN (angrily) {speaker}
I knew you’d come. {dialogue}
She turns away from the window. {action}
FADE OUT. {transition}
THE END {title}
(V.O.) {special_term}
John CONT'D {speaker}"""
                },
                {
                    "role": "user",
                    "content": "I need you to classify the lines below. Please provide the classification in the format: 'line {label}'\n\n" + "\n".join(chunk)
                }
            ]
        )
        chunked_results.extend(response.output_text.splitlines())

    # Parse "content {label}" pairs out of the model output and persist
    # the raw classification before any cleanup runs.
    extracted = extract_labeled_lines(chunked_results)
    with open(audit_ai_csv, mode='w', newline='', encoding='utf-8') as f:
        writer = csv.writer(f)
        writer.writerow(["content", "script_element"])
        writer.writerows(extracted)
    print("Classification completed.")

    # Post-processing pipeline: drop noise rows, merge run-on action and
    # dialogue rows, insert blank spacing rows, add FADE IN/OUT bookends.
    df = pd.read_csv(audit_ai_csv)
    df = remove_empty_content(df)
    df = remove_asterisks(df)
    df = remove_leading_numbers(df)
    df = remove_numeric_only_content(df)
    df = remove_emptyline_rows(df)
    df = remove_trailing_speaker(df)
    # df = merge_consecutive_action_lines(df)
    df = merge_consecutive_action_lines_new(df)
    df = merge_consecutive_dialogue_lines(df)
    df = insert_blank_lines(df)

    df = add_fade_in_out(df)

    return df
|
|
@ -15,7 +15,7 @@ import datetime
|
|||
import pytz
|
||||
import subprocess
|
||||
import warnings
|
||||
|
||||
from scriptAudit.audit_ai import classify_lines
|
||||
|
||||
|
||||
|
||||
|
@ -1544,8 +1544,14 @@ class NeutralAudit:
|
|||
if self.gen_int_files:
|
||||
df.to_csv(csv_after_examined_speaker_pos_after_mix, index = False)
|
||||
|
||||
with open(self.base_file_path + "time_taken.txt", "a") as file007:
|
||||
file007.write("AFTER sf.examine_speaker_pos 39 : Before update_audit_df :audit\n")
|
||||
|
||||
audit_df = self.update_audit_df(df, audit_df)
|
||||
|
||||
with open(self.base_file_path + "time_taken.txt", "a") as file007:
|
||||
file007.write("AFTER sf.examine_speaker_pos 39 : After update_audit_df :audit\n")
|
||||
|
||||
###
|
||||
df = sf.examine_speaker_next_lines(df, audit_df)
|
||||
with open(self.base_file_path + "time_taken.txt", "a") as file007:
|
||||
|
@ -2737,6 +2743,91 @@ class NeutralAudit:
|
|||
|
||||
return character_scripts_dict
|
||||
|
||||
def audit_ai_script(self):
    """Re-audit the script with the OpenAI classifier and publish the result.

    Runs ``classify_lines`` over the plain-text script, saves the
    post-processed CSV and a DOCX rendering, registers both as ``File``
    objects for the script, and returns
    ``(audited_docx_path, unique_script_languages, unique_dialogue_languages)``.
    Returns ``False`` instead when the AI classification step fails
    (NOTE(review): callers that tuple-unpack the return must check for
    ``False`` first — confirm call sites).

    Fixes vs. previous revision:
    - stores ``repr(exp)`` instead of the raw exception object in the
      model's ``results`` field;
    - closes the audited DOCX file handle via a ``with`` block instead of
      leaking it.
    """
    with open(self.base_file_path + "time_taken.txt", "a") as file007:
        file007.write("INSIDE OPPENAI AUDIT\n")

    self.audit_model_obj.error_msg = "INIDE AUDIT AI"
    self.audit_model_obj.save()

    input_txt = self.base_file_path + "temp.txt"
    audit_ai_csv = self.base_file_path + "audit_ai.csv"
    post_processed_audit_ai_csv = self.base_file_path + "post_processed_df.csv"
    print("AI audit in Process")
    try:
        post_processed_df = classify_lines(input_txt, audit_ai_csv)
        print("AI audit done")
        self.audit_model_obj.status = States.SUCCESS
        self.audit_model_obj.error_msg = "AI AUDIT COMPLETED"
        self.audit_model_obj.save()
        with open(self.base_file_path + "time_taken.txt", "a") as file007:
            file007.write("OPPENAI AUDIT Complete\n")
    except Exception as exp:
        print("AI audit error", exp)
        self.audit_model_obj.status = States.FAILURE
        # FIX: persist a string, not the exception object, in the model field.
        self.audit_model_obj.results = repr(exp)
        self.audit_model_obj.error_msg = "AI AUDIT FAILED"
        self.audit_model_obj.save()
        with open(self.base_file_path + "time_taken.txt", "a") as file007:
            file007.write("OPPENAI AUDIT FAILED\n")
        return False

    # Detect the languages used in the cleaned script; fall back to a
    # fixed "ENGLISH-" marker when detection fails.
    language_check_df = sf.check_and_copy_rows(post_processed_df)
    try:
        script_language, dialogue_language = sf.language_detector_for_csv(language_check_df)
        print("script_language", script_language)
        print("dialogue_language", dialogue_language)

        unique_script_languages = ', '.join(set(lang[0] for lang in script_language))
        unique_dialogue_languages = ', '.join(set(lang[0] for lang in dialogue_language))
        print("unique_script_languages", unique_script_languages)
        print("unique_dialogue_languages", unique_dialogue_languages)
        print("langauage detection worked")
    except Exception as exp:
        print(repr(exp))
        unique_script_languages = "ENGLISH-"
        unique_dialogue_languages = "ENGLISH-"
        print("Langauuge detectedion csv didnt work-")

    # Persist the post-processed DataFrame and render it as DOCX.
    merged_df = pd.DataFrame(post_processed_df)
    merged_df.to_csv(post_processed_audit_ai_csv, index=False)
    print("post processed df saved")
    audited_docx_path = self.base_file_path + "AI_csv_to_docx_audited.docx"
    docx = sf.csv_to_docx(merged_df)
    print("docx saved")
    docx.save(audited_docx_path)

    print("\n\n dot to comma changes in slug")
    audited_file_name = self.script_name + ".csv"

    req_file = ContentFile(
        (merged_df.to_csv(index=False, path_or_buf=None)).encode("utf-8"),
        audited_file_name,
    )

    File.objects.create(
        script=Script.objects.get(id=self.script_id),
        type="script-csv",
        file=req_file,
    )

    # FIX: read the DOCX inside a context manager so the handle is closed.
    with open(audited_docx_path, 'rb') as docx_fh:
        docx_file = ContentFile(
            docx_fh.read(),
            "from_audited_csv_to_document.docx",
        )

    File.objects.create(
        script=Script.objects.get(id=self.script_id),
        type="script-docx",
        file=docx_file,
    )
    print("script-docx object created")
    with open(self.base_file_path + "time_taken.txt", "a") as file007:
        file007.write("OPPENAI AUDIT PROCESS COMPLETE\n")
    return audited_docx_path, unique_script_languages, unique_dialogue_languages
|
||||
|
||||
def audit_in_background(self):
|
||||
# # commenting os.fork to make code run in foreground
|
||||
# if os.fork() != 0:
|
||||
|
|
|
@ -2,20 +2,23 @@ from multiprocessing import Process
|
|||
import os
|
||||
from centralisedFileSystem.models import Script
|
||||
from scriptAudit.models import ScriptAuditModel, States
|
||||
# from scriptAudit.tasks import NeutralAuditTask
|
||||
from scriptAudit.mnf_script_audit import NeutralAudit
|
||||
from datetime import datetime
|
||||
from django.core.files.base import ContentFile
|
||||
from utils.filesystem import new_screenplay, create_script_docx,get_file_path,new_screenplay_without_audit_in_background
|
||||
from .mnf_script_audit import NeutralAudit
|
||||
# from narration.vectorcode.code.vector_generation import vector_gen
|
||||
|
||||
from time import sleep
|
||||
import time
|
||||
|
||||
import pandas as pd
|
||||
import re
|
||||
import uuid
|
||||
from django.conf import settings
|
||||
from django.template.loader import render_to_string
|
||||
from django.utils.html import strip_tags
|
||||
from django.core.mail import EmailMultiAlternatives
|
||||
from PyPDF2 import PdfReader, PdfWriter
|
||||
|
||||
|
||||
def update_audit_status(script_id : str, status : str) -> None:
|
||||
|
||||
|
@ -101,31 +104,32 @@ def check_status_and_trigger_vector(script_id,v_id):
|
|||
1.script_id --> audit id
|
||||
2.v_id vector id
|
||||
"""
|
||||
print("vector_id:",v_id)
|
||||
audit_completed = False
|
||||
while not audit_completed:
|
||||
status = ScriptAuditModel.objects.get(
|
||||
script = Script.objects.get(
|
||||
id = script_id
|
||||
))
|
||||
pass
|
||||
# print("vector_id:",v_id)
|
||||
# audit_completed = False
|
||||
# while not audit_completed:
|
||||
# status = ScriptAuditModel.objects.get(
|
||||
# script = Script.objects.get(
|
||||
# id = script_id
|
||||
# ))
|
||||
|
||||
print("waiting for audit to get complete")
|
||||
if status.status == "SUCCESS":
|
||||
try:
|
||||
a_path = get_file_path(str(script_id), "script-docx")
|
||||
vector_gen(a_path,v_id)
|
||||
audit_completed = True
|
||||
break
|
||||
except:
|
||||
create_script_docx(script_id)
|
||||
a_path = get_file_path(str(script_id), "script-docx")
|
||||
print("Audited script path is fetched")
|
||||
vector_gen(a_path,v_id)
|
||||
audit_completed = True
|
||||
break
|
||||
elif status.status == "FAILURE":
|
||||
print("Audit Failed")
|
||||
audit_completed = True
|
||||
# print("waiting for audit to get complete")
|
||||
# if status.status == "SUCCESS":
|
||||
# try:
|
||||
# a_path = get_file_path(str(script_id), "script-docx")
|
||||
# vector_gen(a_path,v_id)
|
||||
# audit_completed = True
|
||||
# break
|
||||
# except:
|
||||
# create_script_docx(script_id)
|
||||
# a_path = get_file_path(str(script_id), "script-docx")
|
||||
# print("Audited script path is fetched")
|
||||
# vector_gen(a_path,v_id)
|
||||
# audit_completed = True
|
||||
# break
|
||||
# elif status.status == "FAILURE":
|
||||
# print("Audit Failed")
|
||||
# audit_completed = True
|
||||
|
||||
|
||||
|
||||
|
@ -269,3 +273,226 @@ def send_email_to_user(user,screenplay_name,subject,message):# removed flag = 1
|
|||
msg.attach_alternative(html_content, "text/html")
|
||||
msg.send()
|
||||
|
||||
def split_pdf_into_chunks(input_pdf_path, chunk_size=3, output_dir="/content"):
    """Split a PDF into consecutive chunks of at most ``chunk_size`` pages.

    A uniquely-named folder is created under ``output_dir`` (default
    "/content", preserving the previous hard-coded location) and one
    ``chunk_<n>.pdf`` is written per chunk.

    Parameters
    ----------
    input_pdf_path : path of the PDF to split.
    chunk_size : maximum number of pages per chunk file.
    output_dir : directory under which the chunk folder is created.

    Returns the list of chunk file paths in page order.
    """
    # Unique folder per call so concurrent runs cannot clobber each other.
    chunk_uuid = f"chunk_{uuid.uuid4().hex[:8]}"
    chunk_folder = os.path.join(output_dir, chunk_uuid)
    os.makedirs(chunk_folder, exist_ok=True)
    reader = PdfReader(input_pdf_path)
    total_pages = len(reader.pages)
    file_paths = []
    for start in range(0, total_pages, chunk_size):
        writer = PdfWriter()
        for page_num in range(start, min(start + chunk_size, total_pages)):
            writer.add_page(reader.pages[page_num])
        chunk_path = os.path.join(chunk_folder, f"chunk_{start // chunk_size + 1}.pdf")
        with open(chunk_path, "wb") as f:
            writer.write(f)
        file_paths.append(chunk_path)
    return file_paths
|
||||
|
||||
def split_text_file_by_lines(input_txt_path, lines_per_chunk=45,
                             output_dir="/content", max_chunks=10):
    """Split a text file into numbered chunk files of ``lines_per_chunk`` lines.

    A uniquely-named folder is created under ``output_dir`` (default
    "/content", preserving the previous hard-coded location) and one
    ``chunk_<n>.txt`` is written per chunk.

    Parameters
    ----------
    input_txt_path : path of the UTF-8 text file to split.
    lines_per_chunk : maximum number of lines per chunk file.
    output_dir : directory under which the chunk folder is created.
    max_chunks : stop after this many chunk files (default 10, preserving
        the previous hard-coded cap); remaining lines are discarded.

    Returns the list of chunk file paths, in order.
    """
    # Unique folder per call so concurrent runs cannot clobber each other.
    chunk_uuid = f"chunk_{uuid.uuid4().hex[:8]}"
    chunk_folder = os.path.join(output_dir, chunk_uuid)
    os.makedirs(chunk_folder, exist_ok=True)

    with open(input_txt_path, "r", encoding="utf-8") as f:
        lines = f.readlines()

    file_paths = []
    for i in range(0, len(lines), lines_per_chunk):
        chunk_lines = lines[i:i + lines_per_chunk]
        chunk_path = os.path.join(chunk_folder, f"chunk_{i // lines_per_chunk + 1}.txt")
        with open(chunk_path, "w", encoding="utf-8") as f:
            f.writelines(chunk_lines)
        file_paths.append(chunk_path)
        if len(file_paths) >= max_chunks:
            break

    print(f"✅ Created {len(file_paths)} chunks in {chunk_folder}")
    return file_paths
|
||||
|
||||
|
||||
def extract_labeled_lines(response_text):
    """Parse '<content> {label}' pairs out of a block of response text.

    Returns a list of two-element lists ``[content, label]``, both stripped,
    one per ``{...}`` occurrence found on each line.
    """
    label_re = re.compile(r"(.*?)(?:\s*)\{(.*?)\}")
    return [
        [content.strip(), label.strip()]
        for raw_line in response_text.strip().split("\n")
        for content, label in label_re.findall(raw_line.strip())
    ]
|
||||
|
||||
|
||||
|
||||
def remove_empty_content(df):
    """Drop rows whose 'content' cell is missing or only whitespace."""
    non_null = df.dropna(subset=['content'])
    has_text = non_null['content'].str.strip() != ''
    return non_null[has_text]
|
||||
|
||||
def remove_leading_numbers(df: pd.DataFrame) -> pd.DataFrame:
    """Strip a leading 'N. ' numbering prefix from every string in 'content'."""
    numbering = re.compile(r'^\s*\d+\.\s*')

    def _strip_prefix(value):
        # Non-string cells (NaN, numbers) pass through untouched.
        return numbering.sub('', value) if isinstance(value, str) else value

    df['content'] = df['content'].apply(_strip_prefix)
    return df
|
||||
|
||||
def remove_numeric_only_content(df: pd.DataFrame) -> pd.DataFrame:
    """Drop rows whose 'content' is nothing but digits (optionally padded
    with whitespace); non-string cells are always kept."""
    digits_only = re.compile(r'\s*\d+\s*')
    is_numeric = df['content'].apply(
        lambda value: isinstance(value, str) and digits_only.fullmatch(value) is not None
    )
    return df[~is_numeric].reset_index(drop=True)
|
||||
|
||||
def remove_emptyline_rows(df: pd.DataFrame) -> pd.DataFrame:
    """Drop rows whose 'content' is just '(empty line)' markers, repeated
    any number of times (case-insensitive)."""
    marker = re.compile(r'(\s*\(empty line\)\s*)+', flags=re.IGNORECASE)

    def _is_marker_row(value):
        return isinstance(value, str) and marker.fullmatch(value.strip()) is not None

    return df[~df['content'].apply(_is_marker_row)].reset_index(drop=True)
|
||||
|
||||
|
||||
def merge_consecutive_action_lines(df: pd.DataFrame) -> pd.DataFrame:
    """Collapse each run of consecutive 'action' rows into a single row,
    joining their contents with a space."""
    collected = []
    for _, current in df.iterrows():
        run_continues = (
            collected
            and collected[-1]['script_element'] == 'action'
            and current['script_element'] == 'action'
        )
        if run_continues:
            # Extend the open action run instead of starting a new row.
            collected[-1]['content'] += " " + current['content']
        else:
            collected.append(current.copy())
    return pd.DataFrame(collected).reset_index(drop=True)
|
||||
|
||||
def merge_consecutive_action_lines_new(df: pd.DataFrame) -> pd.DataFrame:
    """Merge an 'action' row into the preceding 'action' row, but only while
    the preceding row does not already end a sentence ('.', '!' or '?')."""
    result_rows = []
    pending = None

    for _, current in df.iterrows():
        can_merge = (
            pending is not None
            and pending['script_element'] == 'action'
            and current['script_element'] == 'action'
            and not pending['content'].strip().endswith(('.', '!', '?'))
        )
        if can_merge:
            # Continue the open action sentence.
            pending['content'] = pending['content'] + ' ' + current['content'].strip()
        else:
            if pending is not None:
                result_rows.append(pending)
            pending = current.copy()

    if pending is not None:
        result_rows.append(pending)

    return pd.DataFrame(result_rows).reset_index(drop=True)
|
||||
|
||||
|
||||
def merge_consecutive_dialogue_lines(df: pd.DataFrame) -> pd.DataFrame:
    """Collapse each run of consecutive 'dialogue' rows into a single row,
    joining their contents with a space."""
    collected = []
    for _, current in df.iterrows():
        run_continues = (
            collected
            and collected[-1]['script_element'] == 'dialogue'
            and current['script_element'] == 'dialogue'
        )
        if run_continues:
            # Extend the open dialogue run instead of starting a new row.
            collected[-1]['content'] += " " + current['content']
        else:
            collected.append(current.copy())
    return pd.DataFrame(collected).reset_index(drop=True)
|
||||
|
||||
def insert_blank_lines(df: pd.DataFrame) -> pd.DataFrame:
    """Append a 'blank' spacer row after every slugline, dialogue, action
    and transition row."""
    spacer_after = ("slugline", "dialogue", "action", "transition")
    expanded = []
    for _, current in df.iterrows():
        expanded.append(current)
        if current['script_element'] in spacer_after:
            expanded.append(pd.Series({'content': '', 'script_element': 'blank'}))
    return pd.DataFrame(expanded).reset_index(drop=True)
|
||||
|
||||
def add_fade_in_out(df: pd.DataFrame) -> pd.DataFrame:
    """Trim everything before the first slugline, then bracket the script
    with FADE IN / FADE OUT transition rows.

    Returns the DataFrame unchanged when it contains no slugline.
    """
    slug_positions = df.index[df['script_element'] == 'slugline']
    if len(slug_positions) == 0:
        return df

    body = df.loc[slug_positions[0]:].reset_index(drop=True)
    fade_in = pd.DataFrame([{'content': 'FADE IN', 'script_element': 'transition'}])
    fade_out = pd.DataFrame([{'content': 'FADE OUT', 'script_element': 'transition'}])
    return pd.concat([fade_in, body, fade_out], ignore_index=True)
|
||||
|
||||
def remove_asterisks(df: pd.DataFrame) -> pd.DataFrame:
    """Delete every '*' character from the 'content' column (cells are
    coerced to str first, matching the original behaviour)."""
    stars = re.compile(r'\*+')
    df['content'] = df['content'].astype(str).map(lambda text: stars.sub('', text))
    return df
|
||||
|
||||
|
||||
def merge_consecutive_action_lines_new(df: pd.DataFrame) -> pd.DataFrame:
    """Merge a run-on 'action' row into the previous action row, but only
    when the previous row's content does not already end a sentence
    ('.', '!' or '?').

    NOTE(review): this is an exact duplicate of an earlier
    merge_consecutive_action_lines_new definition in this module; being
    later, this one shadows it. Consider deleting one copy.
    """
    merged_rows = []
    prev_row = None

    for _, row in df.iterrows():
        current_is_action = row['script_element'] == 'action'
        previous_is_action = prev_row is not None and prev_row['script_element'] == 'action'

        if (
            current_is_action and
            previous_is_action and
            not prev_row['content'].strip().endswith(('.', '!', '?'))
        ):
            # Merge into previous action
            prev_row['content'] += ' ' + row['content'].strip()
        else:
            # Close out the pending row before starting a new one.
            if prev_row is not None:
                merged_rows.append(prev_row)
            prev_row = row.copy()

    # Flush the final pending row.
    if prev_row is not None:
        merged_rows.append(prev_row)

    return pd.DataFrame(merged_rows).reset_index(drop=True)
|
||||
|
||||
|
||||
|
||||
def extract_labeled_lines(response_lines: list[str]):
    """Pull ``[content, label]`` pairs from lines shaped '<content> {label}'.

    Lines without a ``{...}`` label contribute nothing; lines with several
    labels contribute one pair each.
    """
    label_re = re.compile(r"(.*?)(?:\s*)\{(.*?)\}")
    return [
        [content.strip(), label.strip()]
        for raw_line in response_lines
        for content, label in label_re.findall(raw_line.strip())
    ]
|
||||
|
||||
|
||||
def remove_trailing_speaker(df: pd.DataFrame) -> pd.DataFrame:
    """Drop the final row when it is a dangling 'speaker' with nothing
    after it; always returns a freshly re-indexed frame."""
    dangling = (not df.empty) and df.iloc[-1]['script_element'] == 'speaker'
    trimmed = df.iloc[:-1] if dangling else df
    return trimmed.reset_index(drop=True)
|
|
@ -33,6 +33,8 @@ from auto_email.views import sendmail
|
|||
from MNF import settings
|
||||
from django.contrib.auth.mixins import LoginRequiredMixin
|
||||
from django.contrib.auth.models import User
|
||||
from Blockchain2.models import MNFServersFile
|
||||
from django.core.files import File as File2
|
||||
|
||||
|
||||
|
||||
|
@ -66,6 +68,7 @@ class Get_Counter(LoginRequiredMixin,APIView):
|
|||
|
||||
if current_site in ["qa.mynextfilm.net",
|
||||
"https://qa.mynextfilm.net",
|
||||
"example.com"
|
||||
]:
|
||||
testing_on_dev = True
|
||||
|
||||
|
@ -143,6 +146,8 @@ class Get_Counter(LoginRequiredMixin,APIView):
|
|||
|
||||
|
||||
def run_audit_in_counter(msg):
|
||||
print("################AUDIT DATA IS HERE\n\#######\n\n\n")
|
||||
print(msg)
|
||||
user = msg["user"]
|
||||
s3_url = msg["s3-file-path"]
|
||||
screenplay_name = msg["screenplay_name"]
|
||||
|
@ -152,7 +157,11 @@ def run_audit_in_counter(msg):
|
|||
script_file_name = msg["script_file_name"]
|
||||
language = "en"
|
||||
local_file_path = s3_url
|
||||
number_of_pages = msg["number_of_pages"]
|
||||
script_id = msg["script_id"] #new
|
||||
|
||||
script = Script.objects.get(id=script_id) #new
|
||||
file_instance = File.objects.get(script=script) #new
|
||||
|
||||
with open(local_file_path, 'rb') as file:
|
||||
file_content = file.read()
|
||||
|
@ -161,20 +170,20 @@ def run_audit_in_counter(msg):
|
|||
file_content,
|
||||
script_file_name,
|
||||
)
|
||||
|
||||
file_instance.file.save(script_file_name, file, save=True) #new
|
||||
user = User.objects.get(username=user)
|
||||
|
||||
result = filesystem.new_screenplay_without_audit_in_background(
|
||||
user,
|
||||
author,
|
||||
screenplay_name,
|
||||
file,
|
||||
"script-original",
|
||||
language,
|
||||
)
|
||||
# result = filesystem.new_screenplay_without_audit_in_background(
|
||||
# user,
|
||||
# author,
|
||||
# screenplay_name,
|
||||
# file,
|
||||
# "script-original",
|
||||
# language,
|
||||
# )
|
||||
|
||||
|
||||
script_id = result.get("script", {}).get("id")
|
||||
# script_id = result.get("script", {}).get("id")
|
||||
|
||||
file_to_original = File.objects.get(
|
||||
script=script_id,
|
||||
|
@ -191,19 +200,14 @@ def run_audit_in_counter(msg):
|
|||
id = script_id
|
||||
))
|
||||
audit_only.only_audit = True
|
||||
audit_only.number_of_pages = number_of_pages
|
||||
audit_only.save()
|
||||
|
||||
try:
|
||||
|
||||
audit = NeutralAudit(script_id)
|
||||
audit.audit_in_background()
|
||||
ScriptAuditModel.objects.update_or_create(
|
||||
script = Script.objects.get(
|
||||
id = script_id
|
||||
),
|
||||
defaults={"status" : "SUCCESS"}
|
||||
)
|
||||
|
||||
print("audit in background ran")
|
||||
|
||||
|
||||
except Exception as exp:
|
||||
|
@ -220,14 +224,49 @@ def run_audit_in_counter(msg):
|
|||
script = Script.objects.get(
|
||||
id = script_id
|
||||
))
|
||||
print("status is here",status.status)
|
||||
if status.status == "FAILURE":
|
||||
print("######Error from Audit############\n")
|
||||
status.error_msg = "AIAUDITinProgress"
|
||||
new_docx_script, unique_script_languages, unique_dialogue_languages = audit.audit_ai_script()
|
||||
|
||||
with open(new_docx_script, 'rb') as file:
|
||||
file_content = file.read()
|
||||
|
||||
file = ContentFile(
|
||||
file_content,
|
||||
script_file_name,
|
||||
)
|
||||
file_instance.file.save(script_file_name, file, save=True)
|
||||
file_to_original = File.objects.get(
|
||||
script=script_id,
|
||||
type="script-original"
|
||||
|
||||
)
|
||||
print("Save new docx script as script orginal")
|
||||
# update_audit_status(script_id, States.SUCCESS)
|
||||
status.status = States.SUCCESS
|
||||
status.error_msg = "AIAUDIT-Complete"
|
||||
status.screenplay_language = unique_script_languages
|
||||
status.dialogue_language = unique_dialogue_languages
|
||||
status.save()
|
||||
|
||||
blockchain_obj = UserCredentialsForBlockchain.objects.get(user=user)
|
||||
status.User_preference = blockchain_obj.FileSecureOn
|
||||
status.save()
|
||||
script_original= {}
|
||||
audit_data={}
|
||||
script_original["status"] = "STARTED"
|
||||
script_original["script_id"] = script_id
|
||||
with open(file_to_original.file.path, 'rb') as file:
|
||||
hash = uploadDataToIPFSNode(file)
|
||||
django_file = File2(file)
|
||||
filename = os.path.basename(file_to_original.file.path)
|
||||
files, _ = MNFServersFile.objects.get_or_create(
|
||||
project_id= script_id,
|
||||
file_type="script-original",
|
||||
)
|
||||
files.public_file.save(filename, django_file)
|
||||
script_original["script_file"] = hash
|
||||
script_original["type"] = "script-original"
|
||||
script_original["script_file_path"] = file_to_original.file.path
|
||||
|
@ -255,6 +294,13 @@ def run_audit_in_counter(msg):
|
|||
script_path1 = file_to_audit_docx.file.path
|
||||
with open(script_path1, 'rb') as _file:
|
||||
hash2_docx = uploadDataToIPFSNode(_file)
|
||||
django_file = File2(_file)
|
||||
filename = os.path.basename(script_path1)
|
||||
files, _ = MNFServersFile.objects.get_or_create(
|
||||
project_id= script_id,
|
||||
file_type="script-docx",
|
||||
)
|
||||
files.public_file.save(filename, django_file)
|
||||
script_docx["script_file_path"] = script_path1
|
||||
script_docx["script_file"] = hash2_docx
|
||||
script_docx["type"] = "script-docx"
|
||||
|
@ -287,6 +333,13 @@ def run_audit_in_counter(msg):
|
|||
script_size = file_to_audit_docx.file.size
|
||||
with open(script_path1, 'rb') as _file:
|
||||
hash2_docx = uploadDataToIPFSNode(_file)
|
||||
django_file = File2(_file)
|
||||
filename = os.path.basename(script_path1)
|
||||
files, _ = MNFServersFile.objects.get_or_create(
|
||||
project_id= script_id,
|
||||
file_type="script-docx",
|
||||
)
|
||||
files.public_file.save(filename, django_file)
|
||||
script_docx["script_file_path"] = script_path1
|
||||
script_docx["script_file"] = hash2_docx
|
||||
script_docx["type"] = "script-docx"
|
||||
|
@ -312,6 +365,13 @@ def run_audit_in_counter(msg):
|
|||
# script_size = file_to_audit_docx.file.size
|
||||
with open(script_path1, 'rb') as _file:
|
||||
hash2 = uploadDataToIPFSNode(_file)
|
||||
django_file = File2(_file)
|
||||
filename = os.path.basename(script_path1)
|
||||
files, _ = MNFServersFile.objects.get_or_create(
|
||||
project_id= script_id,
|
||||
file_type="script-pdf",
|
||||
)
|
||||
files.public_file.save(filename, django_file)
|
||||
script_pdf["script_file_path"] = script_path1
|
||||
script_pdf["script_file"] = hash2
|
||||
script_pdf["type"] = "script-pdf"
|
||||
|
@ -336,6 +396,13 @@ def run_audit_in_counter(msg):
|
|||
# script_size = file_to_audit_docx.file.size
|
||||
with open(script_path1, 'rb') as _file:
|
||||
hash2 = uploadDataToIPFSNode(_file)
|
||||
django_file = File2(_file)
|
||||
filename = os.path.basename(script_path1)
|
||||
files, _ = MNFServersFile.objects.get_or_create(
|
||||
project_id= script_id,
|
||||
file_type="script-json",
|
||||
)
|
||||
files.public_file.save(filename, django_file)
|
||||
script_json["script_file_path"] = script_path1
|
||||
script_json["script_file"] = hash2
|
||||
script_json["type"] = "script-json"
|
||||
|
@ -351,47 +418,64 @@ def run_audit_in_counter(msg):
|
|||
print("script_file_path_is_here",script_path)
|
||||
with open(script_path, 'rb') as _file:
|
||||
hash1 = uploadDataToIPFSNode(_file)
|
||||
django_file = File2(_file)
|
||||
filename = os.path.basename(script_path)
|
||||
files, _ = MNFServersFile.objects.get_or_create(
|
||||
project_id= script_id,
|
||||
file_type="script-csv",
|
||||
)
|
||||
files.public_file.save(filename, django_file)
|
||||
script_csv["script_file"] = hash1
|
||||
script_csv["script_file_path"] = script_path
|
||||
script_csv["type"] = "script-csv"
|
||||
audit_data["script-csv"]= script_csv
|
||||
except Exception as exp:
|
||||
print(exp)
|
||||
Response,gasprice = UploadScriptAuditData(OWNER_KEY,blockchain_obj.publicKey,blockchain_obj.user_id,script_id,str(audit_data))
|
||||
print("tx_hash",Response)
|
||||
transactioni_id = str(Response)
|
||||
status.transaction_hash =str(transactioni_id)
|
||||
status.save()
|
||||
to_email = [user.email]
|
||||
print("####### #### to_email",to_email)
|
||||
key_value_aud = { "Script Name" : str(screenplay_name),
|
||||
"Audited script url" : f"{settings.SITE_DOMAIN}/audit/audits1",
|
||||
"User's profile url": f"{settings.SITE_DOMAIN}/memberpage/#/user/{user.id}/personaldetails",}
|
||||
email_code = 'SB1'
|
||||
sendmail(to_email=to_email , email_code=email_code, key_value = key_value_aud )
|
||||
print("$$$$### after sendmail")
|
||||
# user_infos = user_info(tx_hash=Response,service="Script Audit",gas_fee=gasprice)
|
||||
# addition_result = user_infos.update_info(request)
|
||||
hash2_docx = hash_decrypation(hash2_docx)
|
||||
tx_id = Response
|
||||
certificate = certificateGenrate(user.username,"script audit",tx_id,projectname=script_file_name,matic=gasprice)
|
||||
to_email = [user.email]
|
||||
email_code = 'BL1'
|
||||
# key_value = {
|
||||
# "service":"Audited Script",
|
||||
# "hash": hash2_docx,
|
||||
# "public key":blockchain_obj.publicKey,
|
||||
# "Transaction Hash": tx_id,
|
||||
# }
|
||||
key_value = {
|
||||
"Product Name": "Audited Script",
|
||||
"Profile url": f"{settings.SITE_DOMAIN}/memberpage/#/personaldetails",
|
||||
"User url": f"{settings.SITE_DOMAIN}/memberpage/#/user/{user.id}/personaldetails",
|
||||
"Product output card url": f"{settings.SITE_DOMAIN}/audit/audits1",
|
||||
}
|
||||
print("userkey = ", userkey)
|
||||
sendmail(to_email=to_email , email_code=email_code, key_value=key_value, filePath=certificate)
|
||||
print("mail send sucessfully:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
|
||||
|
||||
if status.User_preference != "MNF":
|
||||
|
||||
Response,gasprice = UploadScriptAuditData(OWNER_KEY,blockchain_obj.publicKey,blockchain_obj.user_id,script_id,str(audit_data))
|
||||
print("tx_hash",Response)
|
||||
transactioni_id = str(Response)
|
||||
status.transaction_hash =str(transactioni_id)
|
||||
status.save()
|
||||
to_email = [user.email]
|
||||
print("####### #### to_email",to_email)
|
||||
key_value_aud = { "Script Name" : str(screenplay_name),
|
||||
"Audited script url" : f"{settings.SITE_DOMAIN}/audit/audits1",
|
||||
"User's profile url": f"{settings.SITE_DOMAIN}/memberpage/#/user/{user.id}/personaldetails",}
|
||||
email_code = 'SB1'
|
||||
sendmail(to_email=to_email , email_code=email_code, key_value = key_value_aud )
|
||||
print("$$$$### after sendmail")
|
||||
# user_infos = user_info(tx_hash=Response,service="Script Audit",gas_fee=gasprice)
|
||||
# addition_result = user_infos.update_info(request)
|
||||
hash2_docx = hash_decrypation(hash2_docx)
|
||||
tx_id = Response
|
||||
certificate = certificateGenrate(user.username,"script audit",tx_id,projectname=script_file_name,matic=gasprice)
|
||||
to_email = [user.email]
|
||||
email_code = 'BL1'
|
||||
# key_value = {
|
||||
# "service":"Audited Script",
|
||||
# "hash": hash2_docx,
|
||||
# "public key":blockchain_obj.publicKey,
|
||||
# "Transaction Hash": tx_id,
|
||||
# }
|
||||
key_value = {
|
||||
"Product Name": "Audited Script",
|
||||
"Profile url": f"{settings.SITE_DOMAIN}/memberpage/#/personaldetails",
|
||||
"User url": f"{settings.SITE_DOMAIN}/memberpage/#/user/{user.id}/personaldetails",
|
||||
"Product output card url": f"{settings.SITE_DOMAIN}/audit/audits1",
|
||||
}
|
||||
print("userkey = ", userkey)
|
||||
sendmail(to_email=to_email , email_code=email_code, key_value=key_value, filePath=certificate)
|
||||
print("mail send sucessfully:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
|
||||
else:
|
||||
email_code = 'SB1'
|
||||
key_value_aud = { "Script Name" : str(screenplay_name),
|
||||
"Audited script url" : f"{settings.SITE_DOMAIN}/audit/audits1",
|
||||
"User's profile url": f"{settings.SITE_DOMAIN}/memberpage/#/user/{user.id}/personaldetails",}
|
||||
sendmail(to_email=to_email , email_code=email_code, key_value = key_value_aud)
|
||||
|
||||
data = {"message": "Success"}
|
||||
|
||||
else:
|
||||
|
@ -400,10 +484,13 @@ def run_audit_in_counter(msg):
|
|||
script=script_id,
|
||||
type="script-csv"
|
||||
)
|
||||
file_to_audit_report = File.objects.get(
|
||||
script=script_id,
|
||||
type="audit-report"
|
||||
)
|
||||
try:
|
||||
file_to_audit_report = File.objects.get(
|
||||
script=script_id,
|
||||
type="audit-report"
|
||||
)
|
||||
except:
|
||||
pass
|
||||
hash2 = ""
|
||||
hash2_docx = ""
|
||||
try:
|
||||
|
@ -415,6 +502,13 @@ def run_audit_in_counter(msg):
|
|||
script_path1 = file_to_audit_docx.file.path
|
||||
with open(script_path1, 'rb') as _file:
|
||||
hash2_docx = uploadDataToIPFSNode(_file)
|
||||
django_file = File2(_file)
|
||||
filename = os.path.basename(script_path1)
|
||||
files, _ = MNFServersFile.objects.get_or_create(
|
||||
project_id= script_id,
|
||||
file_type="script-docx",
|
||||
)
|
||||
files.public_file.save(filename, django_file)
|
||||
script_docx["script_file_path"] = script_path1
|
||||
script_docx["script_file"] = hash2_docx
|
||||
script_docx["type"] = "script-docx"
|
||||
|
@ -447,6 +541,13 @@ def run_audit_in_counter(msg):
|
|||
script_size = file_to_audit_docx.file.size
|
||||
with open(script_path1, 'rb') as _file:
|
||||
hash2_docx = uploadDataToIPFSNode(_file)
|
||||
django_file = File2(_file)
|
||||
filename = os.path.basename(script_path1)
|
||||
files, _ = MNFServersFile.objects.get_or_create(
|
||||
project_id= script_id,
|
||||
file_type="script-docx",
|
||||
)
|
||||
files.public_file.save(filename, django_file)
|
||||
script_docx["script_file_path"] = script_path1
|
||||
script_docx["script_file"] = hash2_docx
|
||||
script_docx["type"] = "script-docx"
|
||||
|
@ -472,6 +573,13 @@ def run_audit_in_counter(msg):
|
|||
# script_size = file_to_audit_docx.file.size
|
||||
with open(script_path1, 'rb') as _file:
|
||||
hash2 = uploadDataToIPFSNode(_file)
|
||||
django_file = File2(_file)
|
||||
filename = os.path.basename(script_path1)
|
||||
files, _ = MNFServersFile.objects.get_or_create(
|
||||
project_id= script_id,
|
||||
file_type="script-pdf",
|
||||
)
|
||||
files.public_file.save(filename, django_file)
|
||||
script_pdf["script_file_path"] = script_path1
|
||||
script_pdf["script_file"] = hash2
|
||||
script_pdf["type"] = "script-pdf"
|
||||
|
@ -495,6 +603,13 @@ def run_audit_in_counter(msg):
|
|||
# script_size = file_to_audit_docx.file.size
|
||||
with open(script_path1, 'rb') as _file:
|
||||
hash2 = uploadDataToIPFSNode(_file)
|
||||
django_file = File2(_file)
|
||||
filename = os.path.basename(script_path1)
|
||||
files, _ = MNFServersFile.objects.get_or_create(
|
||||
project_id= script_id,
|
||||
file_type="script-json",
|
||||
)
|
||||
files.public_file.save(filename, django_file)
|
||||
script_json["script_file_path"] = script_path1
|
||||
script_json["script_file"] = hash2
|
||||
script_json["type"] = "script-json"
|
||||
|
@ -508,7 +623,7 @@ def run_audit_in_counter(msg):
|
|||
|
||||
script_csv = {}
|
||||
audit_report ={}
|
||||
audit_report_path = file_to_audit_report.file.path
|
||||
|
||||
script_path = file_to_audit.file.path
|
||||
script_size = file_to_audit.file.size
|
||||
|
||||
|
@ -516,52 +631,79 @@ def run_audit_in_counter(msg):
|
|||
with open(script_path, 'rb') as _file:
|
||||
hash1 = uploadDataToIPFSNode(_file)
|
||||
script_csv["script_file"] = hash1
|
||||
django_file = File2(_file)
|
||||
filename = os.path.basename(script_path)
|
||||
files, _ = MNFServersFile.objects.get_or_create(
|
||||
project_id= script_id,
|
||||
file_type="script-csv",
|
||||
)
|
||||
files.public_file.save(filename, django_file)
|
||||
script_csv["script_file_path"] = script_path
|
||||
script_csv["type"] = "script-csv"
|
||||
with open(audit_report_path, 'rb') as file1:
|
||||
hash2 = uploadDataToIPFSNode(file1)
|
||||
audit_report["script_file"] = hash2
|
||||
audit_report["script_file_path"] = audit_report_path
|
||||
audit_report["type"] = "audit-report"
|
||||
audit_data["script-csv"]= script_csv
|
||||
audit_data["audit-report"]= audit_report
|
||||
|
||||
try:
|
||||
audit_report_path = file_to_audit_report.file.path
|
||||
with open(audit_report_path, 'rb') as file1:
|
||||
hash2 = uploadDataToIPFSNode(file1)
|
||||
django_file = File2(file1)
|
||||
filename = os.path.basename(audit_report_path)
|
||||
files, _ = MNFServersFile.objects.get_or_create(
|
||||
project_id= script_id,
|
||||
file_type="audit-report",
|
||||
)
|
||||
files.public_file.save(filename, django_file)
|
||||
audit_report["script_file"] = hash2
|
||||
audit_report["script_file_path"] = audit_report_path
|
||||
audit_report["type"] = "audit-report"
|
||||
audit_data["script-csv"]= script_csv
|
||||
audit_data["audit-report"]= audit_report
|
||||
except:
|
||||
pass
|
||||
if status.User_preference != "MNF":
|
||||
|
||||
Response,gasprice = UploadScriptAuditData(OWNER_KEY,blockchain_obj.publicKey,blockchain_obj.user_id,script_id,str(audit_data))
|
||||
print("tx_hash",Response)
|
||||
transactioni_id = str(Response)
|
||||
status.transaction_hash =str(transactioni_id)
|
||||
status.save()
|
||||
Response,gasprice = UploadScriptAuditData(OWNER_KEY,blockchain_obj.publicKey,blockchain_obj.user_id,script_id,str(audit_data))
|
||||
print("tx_hash",Response)
|
||||
transactioni_id = str(Response)
|
||||
status.transaction_hash =str(transactioni_id)
|
||||
status.save()
|
||||
|
||||
to_email = [user.email]
|
||||
email_code = 'SB1'
|
||||
key_value_aud = { "Script Name" : str(screenplay_name),
|
||||
"Audited script url" : f"{settings.SITE_DOMAIN}/audit/audits1",
|
||||
"User's profile url": f"{settings.SITE_DOMAIN}/memberpage/#/user/{user.id}/personaldetails",}
|
||||
sendmail(to_email=to_email , email_code=email_code, key_value = key_value_aud)
|
||||
to_email = [user.email]
|
||||
email_code = 'SB1'
|
||||
key_value_aud = { "Script Name" : str(screenplay_name),
|
||||
"Audited script url" : f"{settings.SITE_DOMAIN}/audit/audits1",
|
||||
"User's profile url": f"{settings.SITE_DOMAIN}/memberpage/#/user/{user.id}/personaldetails",}
|
||||
sendmail(to_email=to_email , email_code=email_code, key_value = key_value_aud)
|
||||
|
||||
# user_infos = user_info(tx_hash=Response,service="Script Audit",gas_fee=gasprice)
|
||||
# addition_result = user_infos.update_info(request)
|
||||
hash2_docx = hash_decrypation(hash2_docx)
|
||||
tx_id = Response
|
||||
certificate = certificateGenrate(user.username,"script audit",tx_id,projectname=script_file_name,matic=gasprice)
|
||||
to_email = [user.email]
|
||||
email_code = 'BL1'
|
||||
# key_value = {
|
||||
# "service":"Audited Script",
|
||||
# "hash": hash2_docx,
|
||||
# "public key":blockchain_obj.publicKey,
|
||||
# "Transaction Hash": tx_id,
|
||||
# }
|
||||
key_value = {
|
||||
"Product Name": "Audited Script",
|
||||
"Profile url": f"{settings.SITE_DOMAIN}/memberpage/#/personaldetails",
|
||||
"User url": f"{settings.SITE_DOMAIN}/memberpage/#/user/{user.id}/personaldetails",
|
||||
"Product output card url": f"{settings.SITE_DOMAIN}/audit/audits1",
|
||||
}
|
||||
print("userkey = ", userkey)
|
||||
sendmail(to_email=to_email , email_code=email_code, key_value=key_value, filePath=certificate)
|
||||
print("mail send sucessfully:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
|
||||
else:
|
||||
to_email = [user.email]
|
||||
email_code = 'SB1'
|
||||
key_value_aud = { "Script Name" : str(screenplay_name),
|
||||
"Audited script url" : f"{settings.SITE_DOMAIN}/audit/audits1",
|
||||
"User's profile url": f"{settings.SITE_DOMAIN}/memberpage/#/user/{user.id}/personaldetails",}
|
||||
sendmail(to_email=to_email , email_code=email_code, key_value = key_value_aud)
|
||||
|
||||
# user_infos = user_info(tx_hash=Response,service="Script Audit",gas_fee=gasprice)
|
||||
# addition_result = user_infos.update_info(request)
|
||||
hash2_docx = hash_decrypation(hash2_docx)
|
||||
tx_id = Response
|
||||
certificate = certificateGenrate(user.username,"script audit",tx_id,projectname=script_file_name,matic=gasprice)
|
||||
to_email = [user.email]
|
||||
email_code = 'BL1'
|
||||
# key_value = {
|
||||
# "service":"Audited Script",
|
||||
# "hash": hash2_docx,
|
||||
# "public key":blockchain_obj.publicKey,
|
||||
# "Transaction Hash": tx_id,
|
||||
# }
|
||||
key_value = {
|
||||
"Product Name": "Audited Script",
|
||||
"Profile url": f"{settings.SITE_DOMAIN}/memberpage/#/personaldetails",
|
||||
"User url": f"{settings.SITE_DOMAIN}/memberpage/#/user/{user.id}/personaldetails",
|
||||
"Product output card url": f"{settings.SITE_DOMAIN}/audit/audits1",
|
||||
}
|
||||
print("userkey = ", userkey)
|
||||
sendmail(to_email=to_email , email_code=email_code, key_value=key_value, filePath=certificate)
|
||||
print("mail send sucessfully:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
|
||||
data = {"message": "Success"}
|
||||
# deleting the folder and files
|
||||
|
||||
|
@ -692,13 +834,16 @@ class AuditedScriptsView_Without_blockchain(LoginRequiredMixin, APIView):
|
|||
no_of_pages = audit_status_object.number_of_pages
|
||||
screenplay_language = audit_status_object.screenplay_language
|
||||
dialogue_language = audit_status_object.dialogue_language
|
||||
isfdx = audit_status_object.isfdx
|
||||
dont_show_audit_report = audit_status_object.isfdx
|
||||
if audit_status_object.error_msg == "AIAUDIT-Complete":
|
||||
dont_show_audit_report = True
|
||||
# transaction_haash = audit_status_object.transaction_hash
|
||||
# privatekeycnf = audit_status_object.bchain_privatekey
|
||||
privatekeycnf = "dhjdjfhd"
|
||||
print("SCRIPT ID = ", str(script.id))
|
||||
print("extimated time = ", extimated_time)
|
||||
print("isfdx", isfdx)
|
||||
# print("isfdx", isfdx)
|
||||
print("dont_show_audit_report", dont_show_audit_report)
|
||||
print("no of pages = ", no_of_pages)
|
||||
print("screenplay language = ", screenplay_language)
|
||||
print("dialogue_language =", dialogue_language)
|
||||
|
@ -717,7 +862,7 @@ class AuditedScriptsView_Without_blockchain(LoginRequiredMixin, APIView):
|
|||
"page_number" : str(no_of_pages),
|
||||
"transaction_hash" : str(1),
|
||||
"confirmkey": str(privatekeycnf),
|
||||
"isfdx" : str(isfdx)
|
||||
"dont_show_audit_report" : str(dont_show_audit_report),
|
||||
}
|
||||
)
|
||||
elif script_audit_status == States.FAILURE:
|
||||
|
@ -869,6 +1014,20 @@ class DownloadScriptFromBlockchain(APIView):
|
|||
if not request.query_params.get("type") in self.VALID_FILE_TYPES:
|
||||
raise spex.IllegalFiletype(file_type, self.VALID_FILE_TYPES)
|
||||
|
||||
try:
|
||||
status = ScriptAuditModel.objects.get(script = Script.objects.get(id = script_id))
|
||||
if status.User_preference == "MNF":
|
||||
file = File.objects.get(script=script_id, type=file_type)
|
||||
if file_type == "script-json":
|
||||
open_file = open(file.file.path, 'r')
|
||||
script_json = json.load(open_file)
|
||||
return JsonResponse({"script-json": script_json}, status=200)
|
||||
return JsonResponse({"status":True, "download_link": file.file.url, "file_name": str(file.file.name)})
|
||||
except Exception as exp:
|
||||
print("###ERROR:",exp)
|
||||
return JsonResponse({"status":False, "error": "The File Does Not Exist On The MNF",},status=500)
|
||||
|
||||
|
||||
try:
|
||||
if UserCredentialsForBlockchain.objects.filter(user=request.user).exists():
|
||||
blockchain_obj = UserCredentialsForBlockchain.objects.get(user=request.user)
|
||||
|
|
Loading…
Reference in New Issue