Add new audit changes to mnf_script_audit, sa_fun, sa_func_end and views

This commit is contained in:
Ubuntu 2024-09-03 12:07:33 +00:00
parent e7f53ba555
commit be7d3283ed
4 changed files with 747 additions and 378 deletions

View File

@ -14,15 +14,23 @@ from io import BytesIO
import datetime
import pytz
import subprocess
import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
pd.options.mode.copy_on_write = False
pd.options.mode.chained_assignment = None
warnings.filterwarnings("ignore", category=DeprecationWarning)
warnings.filterwarnings("ignore", category=RuntimeWarning)
# from django_q.tasks import async_task
# from django_q.brokers import Broker
class NeutralAudit:
def __init__(
self,
script_id: str = None,
log: bool = True,
log: bool = False,
) -> None:
"""
To Audit a Script already uploded.
@ -42,47 +50,35 @@ class NeutralAudit:
Return :
None
"""
# pd.options.mode.copy_on_write = False
# pd.options.mode.chained_assignment = None
self.start_time_count = time.time()
print("<<<<<<<<<<<<<<<<<<<<<<<<<")
self.matrices_path = str(Path(__file__).resolve().parent) + "/matrices/"
self.total_time_file = str(Path(__file__).resolve().parent)
print(script_id,"SCRIPT-ID IS HERE|| AYYA")
self.script_id = script_id
audit_root_dir = (
str(Path(__file__).resolve().parent.parent) + "/media/audit_folder/"
)
audit_root_dir = (str(Path(__file__).resolve().parent.parent) + "/media/audit_folder/")
self.script_name = str(self.script_id)
# self.total_line_before_audit = 1
output_dir = os.path.join(audit_root_dir, self.script_name)
t_time_file = self.total_time_file + "/tail_errors.txt"
# with open(t_time_file, "a") as file008:
# file008.write(str(self.start_time_count))
file_to_audit = File.objects.get(
script=script_id,
type="script-original",
)
self.input_script = file_to_audit.file.path
if not os.path.exists(output_dir):
try:
os.mkdir(output_dir)
os.makedirs(output_dir,exist_ok=True)
except Exception as exp:
print(repr(exp))
subprocess.run(["mkdir", output_dir])
subprocess.run(["chmod", "777", output_dir])
##print(output_dir)
self.base_file_path = str(output_dir) + "/"
self.csv_removed_space_between_words = (
self.base_file_path + "space_between_words_removed.csv"
)
self.csv_removed_space_between_words = (self.base_file_path + "space_between_words_removed.csv")
self.audit_report_csv = self.base_file_path + "audit_spreadsheet.csv"
sys.stdout = open(os.devnull, "w")
if log:
log_file = self.base_file_path + "_log.txt"
@ -91,15 +87,7 @@ class NeutralAudit:
else:
self.gen_int_files = False
sys.stdout = sys.__stdout__
self.audit_model_obj = ScriptAuditModel.objects.get(
script = Script.objects.get(
id = self.script_id,
)
)
self.audit_model_obj = ScriptAuditModel.objects.get(script = Script.objects.get(id = self.script_id,))
time_file = self.base_file_path + "time_taken.txt"
start_time = datetime.datetime.now()
print(start_time)
@ -1129,6 +1117,11 @@ class NeutralAudit:
#if self.gen_int_files:
df.to_csv(csv_after_gen_and_sort_weights, index = False)
##
## remove some columns
df.drop(['first_largest', 'second_largest','third_largest','fourth_largest','fifth_largest','sixth_largest','seventh_largest','eight_largest','ninth_largest','tenth_largest','eleventh_largest','twelth_largest','thirteenth_largest','fourteenth_largest','fifteenth_largest','sixteenth_largest','seventeenth_largest','eighteenth_largest','ninteenth_largest','tewenty_largest','tone_largest','ttwo_largest','tthree_largest','tfour_largest','tfive_largest','tsix_largest','tseven_largest','teight_largest'], axis=1, inplace=True)
sf.prep_for_pos_elimination(df)
with open(self.base_file_path + "time_taken.txt", "a") as file007:
file007.write("AFTER sf.prep_for_pos_elimination 5 : AFTER ASSIGNING LOCATIONS AUDIT :audit\n")
@ -2200,6 +2193,7 @@ class NeutralAudit:
# audit_report_buffer = sf.print_audit_report_tabular_docx(audit_df,line_count_before_audit,line_count_after_audit) #commented on 13-09-23
para_filetered_audut_df = sf.assign_para_no(audit_df)
print("after para assign")
with open(self.base_file_path + "time_taken.txt", "a") as file007:
file007.write("AFTER para_filetered_audut_df\n")
@ -2256,15 +2250,21 @@ class NeutralAudit:
except:
print("page number and language insertion failed")
pass
auditdf_before_table_creation = self.base_file_path + "auditbefore_table.csv"
para_filetered_audut_df.to_csv(auditdf_before_table_creation)
print("B4 audit report buffer")
audit_report_buffer = sf.print_audit_report_tabular_docx(para_filetered_audut_df,scriptname,author,pre_audit_pagenumber,postauditpagenumber,preaudit_line_no,postaudit_line_no,script_language,dialogue_language)
print("after audit buffer")
with open(self.base_file_path + "time_taken.txt", "a") as file007:
file007.write("AFTER sf.print_audit_report_tabular_docx 87 : AFTER ASSIGNING LOCATIONS AUDIT :audit\n")
# audit_report_path = self.base_file_path + "audit_report_doc.docx"
# report_data = Document(audit_report_buffer)
# report_data.save(audit_report_path)
req_file = ContentFile(audit_report_buffer.read(), audit_report_name)
print("going into models :- ")
try:
req_file = ContentFile(audit_report_buffer.read(), audit_report_name)
except Exception as exp:
print(repr(exp))
print("repoo")
with open(self.base_file_path + "time_taken.txt", "a") as file007:
file007.write("AFTER sf.print_audit_report_tabular_docx 87 : AFTER ASSIGNING LOCATIONS AUDIT :audit\n"+str(req_file))
@ -2318,6 +2318,38 @@ class NeutralAudit:
def script_meta(self):
    """Placeholder for script-metadata extraction; intentionally not implemented yet."""
    pass
def audit_ai_gen_script(self, lang: str = None):
    """Audit an AI-generated script and persist the result as a 'script-csv' File.

    Runs the pre-audit pipeline, classifies every line via
    sf_eng.ai_gen_script_to_audited_df, merges lines into paragraphs with
    sf.merge_line_to_para, and stores the paragraph-level CSV as a File row
    attached to this script.

    Parameters:
        lang: optional language hint forwarded to before_audit().

    Returns:
        None
    """
    df, _ = self.before_audit(lang)
    df = sf_eng.ai_gen_script_to_audited_df(df)
    para_df = sf.merge_line_to_para(df)
    # Best-effort debug dump of the merged paragraphs; auditing proceeds
    # even if this intermediate CSV cannot be written.
    try:
        para_df.to_csv(self.base_file_path + "after_merge_line_para.csv", index=False)
        print("para_df is written")
    except Exception as exp:
        # Was a bare `except: pass` — now at least log why the dump failed
        # without catching KeyboardInterrupt/SystemExit.
        print(repr(exp))
    audited_file_name = self.script_name + ".csv"
    req_file = ContentFile(
        (para_df.to_csv(index=False, path_or_buf=None)).encode("utf-8"),
        audited_file_name,
    )
    ## for local - uncomment
    print("\n\n the code is here\n\n")
    File.objects.create(
        script=Script.objects.get(id=self.script_id),
        type="script-csv",
        file=req_file,
    )
    print("\n\n @@@@#$$$$$$$$ csv saved from s2s\n\n")
def audit_fdx(self):
# fdx to audited csv
@ -2461,7 +2493,7 @@ class NeutralAudit:
with open(self.base_file_path + "time_taken.txt", "a") as file007:
file007.write("\nafter quick audit : AFTER ASSIGNING LOCATIONS AUDIT :audit\n")
print(quick_audit_flag)
# print(quick_audit_flag)
def get_character_list(self, lang: str = None):
@ -2546,7 +2578,7 @@ class NeutralAudit:
output_dir = os.path.join(self.base_file_path, foldername)
if not os.path.exists(output_dir):
os.mkdir(output_dir)
os.makedirs(output_dir, exist_ok=True)
df_after_audit = pd.read_csv(audited_linewise_csv)
df_after_audit.fillna("", inplace=True)
@ -2679,7 +2711,8 @@ class NeutralAudit:
data = df_after_audit["data"][index]
try:
print(data)
# print(data)
pass
except:
pass
fout.writelines(str(data))
@ -2700,7 +2733,7 @@ class NeutralAudit:
return character_scripts_dict
def audit_in_background(self):
# # commenting os.fork to make code run in foreground
# if os.fork() != 0:
# return
@ -2715,10 +2748,10 @@ class NeutralAudit:
self.audit_model_obj.status = States.SUCCESS
self.audit_model_obj.save()
print("Audit Success!!!!!!!!!!!!!!!!!!!!!!!")
end_time = datetime.datetime.now()
# end_time = datetime.datetime.now()
with open(self.base_file_path + "time_taken.txt", "a") as file007:
file007.write("\n\n****AUDITING IS SUCCESSFUL****\n")
print(end_time)
# print(end_time)
except Exception as exp:
@ -2726,7 +2759,8 @@ class NeutralAudit:
self.audit_model_obj.results = exp
self.audit_model_obj.error_msg = "FAILED"
self.audit_model_obj.save()
print(end_time)
# print(end_time)
if __name__ == "__main__":
naudit = NeutralAudit("123", True)

File diff suppressed because it is too large Load Diff

View File

@ -702,3 +702,53 @@ def run_audit_on_identified_english(df,audit_df):
#df = df.sort_values(by=['line_no']).reset_index(drop =True)
return df
def ai_gen_script_to_audited_df(df):
    """Classify every unidentified line of an AI-generated script DataFrame.

    Assigns an ``Identification_Status`` pattern code ('ps1', 'ps6', 'ps7',
    'ps8', 'ps10', 'ps15', 'ps16' or 'blank') to each row not already marked
    ``isIdentified == 'Yes'``, based on the line-break flags (``nlb``/``plb``),
    letter case and parenthetical columns, then marks the row identified.

    Parameters:
        df: DataFrame with columns 'data', 'nlb', 'plb', 'case',
            'parenthetical', 'isIdentified' and 'Identification_Status'.

    Returns:
        The same DataFrame, mutated in place.
    """
    # NOTE: the original used chained indexing (df['col'][idx] = x), which
    # writes to a temporary copy under pandas copy-on-write and is deprecated;
    # df.at is the supported scalar accessor and guarantees in-place writes.
    for index in df.index:
        # Skip rows already classified or previously marked blank.
        if df.at[index, 'isIdentified'] == 'Yes' or df.at[index, 'Identification_Status'] == 'blank':
            continue
        data = str(df.at[index, 'data'])
        # Whitespace-only line -> 'blank'.
        if data.strip() == "":
            df.at[index, 'isIdentified'] = 'Yes'
            df.at[index, 'Identification_Status'] = 'blank'
            continue
        # Scene heading (slugline) -> ps1.
        if data.startswith(('INT.', 'EXT.')):
            df.at[index, 'Identification_Status'] = 'ps1'
            df.at[index, 'isIdentified'] = 'Yes'
            continue
        nlb = df.at[index, 'nlb']
        plb = df.at[index, 'plb']
        paren = df.at[index, 'parenthetical']
        # Decision table over (nlb, plb, extra condition); first match wins,
        # preserving the original top-to-bottom precedence.
        if nlb == 'Y' and plb == 'Y' and df.at[index, 'case'] == 'AllUpper':
            status = 'ps16'
        elif nlb == 'Y' and plb == 'Y':
            status = 'ps6'
        elif nlb == 'Y' and plb == 'N':
            status = 'ps15'
        elif nlb == 'N' and plb == 'Y' and paren == 'PartMidEnd':
            status = 'ps8'
        elif nlb == 'N' and plb == 'Y' and paren == 'Absent':
            status = 'ps7'
        elif nlb == 'N' and plb == 'N' and paren == 'Complete':
            status = 'ps10'
        else:
            # Fall-through: treat anything unmatched as action (ps6).
            status = 'ps6'
        df.at[index, 'Identification_Status'] = status
        df.at[index, 'isIdentified'] = 'Yes'
    return df

View File

@ -48,6 +48,7 @@ class Get_Counter(LoginRequiredMixin,APIView):
print("CURREENT DOMAIN :-----")
print(current_site)
running_in_production = True
testing_on_dev = False
if current_site in ["http://1.6.141.108",
"http://1.6.141.104",
"http://1.6.141.103",
@ -58,18 +59,31 @@ class Get_Counter(LoginRequiredMixin,APIView):
"1.6.141.103",
"1.6.141.106",
"taj.mynextfilm.in",
"qa.mynextfilm.net",
"https://qa.mynextfilm.net",
# "qa.mynextfilm.net",
# "https://qa.mynextfilm.net",
]:
running_in_production = False
if current_site in ["qa.mynextfilm.net",
"https://qa.mynextfilm.net",
]:
testing_on_dev = True
session = boto3.Session(
aws_access_key_id='AKIAQVLBBGCB45RMLKVW',
aws_secret_access_key='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',
)
sqs = session.resource('sqs', region_name='ap-south-1')
queue = sqs.get_queue_by_name(QueueName="mnfqueue")
if testing_on_dev == True:
print("#######\n\n\n")
print("Sending files to Development server\n\n\n\n")
queue = sqs.get_queue_by_name(QueueName="devqueue")
else:
queue = sqs.get_queue_by_name(QueueName="mnfqueue")
user = str(request.user)
screenplay_name = request.data.get('screenplay_name')
author = request.data.get('author_name')
@ -97,10 +111,12 @@ class Get_Counter(LoginRequiredMixin,APIView):
print(response)
except Exception as e:
print("Error is", e)
print("#######\n\n\n")
print("Sending files to Production server\n\n\n\n")
s3_url = f"https://{bucket}.s3.ap-south-1.amazonaws.com/{object_name}"
else:
print("#######\n\n\n")
print("Sending files to Local server\n\n\n\n")
s3_url = media_path
audit_parameters = {
"service_type" : "audit",
@ -531,27 +547,27 @@ def run_audit_in_counter(msg):
# deleting the folder and files
# Delete a file
media_path = os.path.join(settings.MEDIA_ROOT, "audit_counter_files", script_file_name)
if os.path.exists(media_path):
os.remove(media_path)
print(f"File '{media_path}' deleted successfully.")
else:
print(f"File '{media_path}' does not exist.")
# media_path = os.path.join(settings.MEDIA_ROOT, "audit_counter_files", script_file_name)
# if os.path.exists(media_path):
# os.remove(media_path)
# print(f"File '{media_path}' deleted successfully.")
# else:
# print(f"File '{media_path}' does not exist.")
# Delete a folder and its contents
folder1_path = os.path.join(settings.MEDIA_ROOT, "scripts_folder", script_id)
if os.path.exists(folder1_path):
shutil.rmtree(folder1_path)
print(f"Folder '{folder1_path}' and its contents deleted successfully.")
else:
print(f"Folder '{folder1_path}' does not exist.")
# # Delete a folder and its contents
# folder1_path = os.path.join(settings.MEDIA_ROOT, "scripts_folder", script_id)
# if os.path.exists(folder1_path):
# shutil.rmtree(folder1_path)
# print(f"Folder '{folder1_path}' and its contents deleted successfully.")
# else:
# print(f"Folder '{folder1_path}' does not exist.")
folder2_path = os.path.join(settings.MEDIA_ROOT, "audit_folder", script_id)
if os.path.exists(folder2_path):
shutil.rmtree(folder2_path)
print(f"Folder '{folder2_path}' and its contents deleted successfully.")
else:
print(f"Folder '{folder2_path}' does not exist.")
# folder2_path = os.path.join(settings.MEDIA_ROOT, "audit_folder", script_id)
# if os.path.exists(folder2_path):
# shutil.rmtree(folder2_path)
# print(f"Folder '{folder2_path}' and its contents deleted successfully.")
# else:
# print(f"Folder '{folder2_path}' does not exist.")
return JsonResponse(data, status=200)
# return Response("Success", status=200)
@ -836,7 +852,7 @@ class DownloadScriptFromBlockchain(APIView):
else:
return JsonResponse({"status":False, "error": "This Transcation Is Not Found On The Blockchain.",},status=500)
else:
return JsonResponse({"status":False, "error": "Your Private Key Is Worng", "key": str(userkeys)},status=500)
return JsonResponse({"status":False, "error": "Your Private Key Is Wrong", "key": str(userkeys)},status=500)
else:
return JsonResponse({"status":False, "error": "Your Wallet is Not Created",},status=500)
except Exception as e: