From 603760a4a43bd63b1c66173ff3bf1f2a499b8cc7 Mon Sep 17 00:00:00 2001 From: Horilla Date: Fri, 11 Apr 2025 10:28:03 +0530 Subject: [PATCH] [UPDT] EMPLOYEE: Updated employee import method --- employee/forms.py | 5 +- employee/methods/methods.py | 281 +++++- employee/static/employee/actions.js | 1 + employee/static/employee/importExport.js | 420 +++----- .../templates/employee/employee_import.html | 49 + employee/templates/employee_nav.html | 911 +++++++----------- employee/urls.py | 5 + employee/views.py | 449 ++++----- 8 files changed, 990 insertions(+), 1131 deletions(-) create mode 100644 employee/templates/employee/employee_import.html diff --git a/employee/forms.py b/employee/forms.py index 48b39a49a..94fb67c68 100644 --- a/employee/forms.py +++ b/employee/forms.py @@ -492,8 +492,11 @@ excel_columns = [ ("employee_work_info__work_type_id", trans("Work Type")), ("employee_work_info__reporting_manager_id", trans("Reporting Manager")), ("employee_work_info__employee_type_id", trans("Employee Type")), - ("employee_work_info__location", trans("Work Location")), + ("employee_work_info__location", trans("Location")), ("employee_work_info__date_joining", trans("Date Joining")), + ("employee_work_info__basic_salary", trans("Basic Salary")), + ("employee_work_info__salary_hour", trans("Salary Hour")), + ("employee_work_info__contract_end_date", trans("Contract End Date")), ("employee_work_info__company_id", trans("Company")), ("employee_bank_details__bank_name", trans("Bank Name")), ("employee_bank_details__branch", trans("Branch")), diff --git a/employee/methods/methods.py b/employee/methods/methods.py index e3c8d0837..0370da34f 100644 --- a/employee/methods/methods.py +++ b/employee/methods/methods.py @@ -3,14 +3,16 @@ employee/methods.py """ import logging +import re import threading -from datetime import datetime +from datetime import date, datetime from itertools import groupby import pandas as pd from django.apps import apps from django.contrib.auth.models import User from django.db import models +from django.utils.translation import gettext as _ from base.context_processors import get_initial_prefix from base.models import ( @@ -26,6 +28,72 @@ from employee.models import Employee, EmployeeWorkInformation logger = logging.getLogger(__name__) +error_data_template = { + field: [] + for field in [ + "Badge ID", + "First Name", + "Last Name", + "Phone", + "Email", + "Gender", + "Department", + "Job Position", + "Job Role", + "Work Type", + "Shift", + "Employee Type", + "Reporting Manager", + "Company", + "Location", + "Date Joining", + "Contract End Date", + "Basic Salary", + "Salary Hour", + "Email Error", + "First Name Error", + "Name and Email Error", + "Phone Error", + "Gender Error", + "Joining Date Error", + "Contract Date Error", + "Badge ID Error", + "Basic Salary Error", + "Salary Hour Error", + "User ID Error", + "Company Error", + ] +} + + +def normalize_phone(phone): + phone = str(phone).strip() + if phone.startswith("+"): + return "+" + re.sub(r"\D", "", phone[1:]) + return re.sub(r"\D", "", phone) + + +def import_valid_date(date_value, field_label, errors_dict, error_key): + if pd.isna(date_value) or date_value is None or str(date_value).strip() == "": + return None + + if isinstance(date_value, datetime): + return date_value.date() + + date_str = str(date_value).strip() + date_formats = ["%Y-%m-%d", "%d/%m/%Y", "%m/%d/%Y"] + + for fmt in date_formats: + try: + return datetime.strptime(date_str, fmt).date() + except ValueError: + continue + + errors_dict[error_key] = ( + 
f"{field_label} is not a valid date. Expected formats: YYYY-MM-DD, DD/MM/YYYY" + ) + return None + def convert_nan(field, dicts): """ @@ -117,6 +185,172 @@ def check_relationship_with_employee_model(model): return related_fields +def valid_import_file_headers(data_frame): + if data_frame.empty: + message = _("The uploaded file is empty, Not contain records.") + return False, message + + required_keys = [ + "Badge ID", + "First Name", + "Last Name", + "Phone", + "Email", + "Gender", + "Department", + "Job Position", + "Job Role", + "Work Type", + "Shift", + "Employee Type", + "Reporting Manager", + "Company", + "Location", + "Date Joining", + "Contract End Date", + "Basic Salary", + "Salary Hour", + ] + + missing_keys = [key for key in required_keys if key not in data_frame.columns] + if missing_keys: + message = _( + "These required headers are missing in the uploaded file: " + ) + ", ".join(missing_keys) + return False, message + return True, "" + + +def process_employee_records(data_frame): + created_count = 0 + success_list, error_list = [], [] + employee_dicts = data_frame.to_dict("records") + email_regex = re.compile(r"^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$") + phone_regex = re.compile(r"^\+?\d{10,15}$") + allowed_genders = {choice[0] for choice in Employee.choice_gender} + + existing_badge_ids = set(Employee.objects.values_list("badge_id", flat=True)) + existing_usernames = set(User.objects.values_list("username", flat=True)) + existing_name_emails = set( + Employee.objects.values_list( + "employee_first_name", "employee_last_name", "email" + ) + ) + existing_companies = set(Company.objects.values_list("company", flat=True)) + + for emp in employee_dicts: + errors, save = {}, True + + email = emp.get("Email", "").strip() + raw_phone = emp.get("Phone", "") + phone = normalize_phone(raw_phone) + badge_id = str(emp.get("Badge ID", "") or "").strip() + first_name = convert_nan("First Name", emp) + last_name = convert_nan("Last Name", emp) + gender = emp.get("Gender", "").strip().lower() + company = convert_nan("Company", emp) + basic_salary = convert_nan("Basic Salary", emp) + salary_hour = convert_nan("Salary Hour", emp) + + joining_date = import_valid_date( + emp.get("Date Joining"), "Joining Date", errors, "Joining Date Error" + ) + if "Joining Date Error" in errors: + save = False + if joining_date and joining_date > date.today(): + errors["Joining Date Error"] = "Joining date cannot be in the future." + save = False + + contract_end_date = import_valid_date( + emp.get("Contract End Date"), + "Contract End Date", + errors, + "Contract Date Error", + ) + if "Contract Error" in errors: + save = False + if contract_end_date and joining_date and contract_end_date < joining_date: + errors["Contract Date Error"] = ( + "Contract end date cannot be before joining date." + ) + save = False + + if not email or not email_regex.match(email): + errors["Email Error"] = "Invalid email address." + save = False + + if not first_name: + errors["First Name Error"] = "First name cannot be empty." + save = False + + if not phone_regex.match(phone): + errors["Phone Error"] = "Invalid phone number format." + save = False + + if badge_id in existing_badge_ids: + errors["Badge ID Error"] = "An employee with this badge ID already exists." + save = False + else: + existing_badge_ids.add(badge_id) + + if email in existing_usernames: + errors["User ID Error"] = "User with this email already exists." 
+ save = False + else: + existing_usernames.add(email) + + name_email_tuple = (first_name, last_name, email) + if name_email_tuple in existing_name_emails: + errors["Name and Email Error"] = ( + "This employee already exists in the system." + ) + save = False + else: + existing_name_emails.add(name_email_tuple) + + if gender and gender not in allowed_genders: + errors["Gender Error"] = ( + f"Invalid gender. Allowed values: {', '.join(allowed_genders)}." + ) + save = False + + if company and company not in existing_companies: + errors["Company Error"] = f"Company '{company}' does not exist." + save = False + + if basic_salary not in [None, ""]: + try: + basic_salary_val = float(basic_salary) + if basic_salary_val <= 0: + raise ValueError + except (ValueError, TypeError): + errors["Basic Salary Error"] = "Basic salary must be a positive number." + save = False + + if salary_hour not in [None, ""]: + try: + salary_hour_val = float(salary_hour) + if salary_hour_val < 0: + raise ValueError + except (ValueError, TypeError): + errors["Salary Hour Error"] = ( + "Salary hour must be a non-negative number." + ) + save = False + + if save: + emp["Phone"] = phone + emp["Date Joining"] = joining_date + emp["Contract End Date"] = contract_end_date + success_list.append(emp) + created_count += 1 + else: + emp.update(errors) + error_list.append(emp) + + return success_list, error_list, created_count + + def bulk_create_user_import(success_lists): """ Bulk creation of user instances based on the excel import of employees @@ -166,7 +400,7 @@ def bulk_create_employee_import(success_lists): if not user: continue - badge_id = work_info["Badge id"] + badge_id = work_info["Badge ID"] first_name = convert_nan("First Name", work_info) last_name = convert_nan("Last Name", work_info) phone = work_info["Phone"] @@ -203,7 +437,7 @@ def set_initial_password(employees): logger.info("initial password configured") -def optimize_reporting_manager_lookup(success_lists): +def optimize_reporting_manager_lookup(): """ Optimizes the lookup of reporting managers from a list of work information. @@ -212,21 +446,8 @@ def optimize_reporting_manager_lookup(success_lists): single database query, and creates a dictionary for quick lookups based on the full name of the reporting managers. 
""" - # Step 1: Collect unique reporting manager names - unique_managers = set() - for work_info in success_lists: - reporting_manager = convert_nan("Reporting Manager", work_info) - if isinstance(reporting_manager, str) and " " in reporting_manager: - unique_managers.add(reporting_manager) + employees = Employee.objects.entire() - # Step 2: Query all relevant Employee objects in one go - manager_names = list(unique_managers) - employees = Employee.objects.filter( - employee_first_name__in=[name.split(" ")[0] for name in manager_names], - employee_last_name__in=[name.split(" ")[1] for name in manager_names], - ) - - # Step 3: Create a dictionary for quick lookups employee_dict = { f"{employee.employee_first_name} {employee.employee_last_name}": employee for employee in employees @@ -434,8 +655,7 @@ def bulk_create_work_info_import(success_lists): new_work_info_list = [] update_work_info_list = [] - # Filtered data for required lookups - badge_ids = [row["Badge id"] for row in success_lists] + badge_ids = [row["Badge ID"] for row in success_lists] departments = set(row.get("Department") for row in success_lists) job_positions = set(row.get("Job Position") for row in success_lists) job_roles = set(row.get("Job Role") for row in success_lists) @@ -444,7 +664,6 @@ def bulk_create_work_info_import(success_lists): shifts = set(row.get("Shift") for row in success_lists) companies = set(row.get("Company") for row in success_lists) - # Bulk fetch related objects and reduce repeated DB calls existing_employees = { emp.badge_id: emp for emp in Employee.objects.entire() @@ -495,17 +714,25 @@ def bulk_create_work_info_import(success_lists): comp.company: comp for comp in Company.objects.filter(company__in=companies).only("company") } - reporting_manager_dict = optimize_reporting_manager_lookup(success_lists) + reporting_manager_dict = optimize_reporting_manager_lookup() + for work_info in success_lists: email = work_info["Email"] - badge_id = work_info["Badge id"] + badge_id = work_info["Badge ID"] department_obj = existing_departments.get(work_info.get("Department")) - key = ( + + job_position_key = ( existing_departments.get(work_info.get("Department")), work_info.get("Job Position"), ) - job_position_obj = existing_job_positions.get(key) - job_role_obj = existing_job_roles.get(work_info.get("Job Role")) + job_position_obj = existing_job_positions.get(job_position_key) + + job_role_key = ( + job_position_obj, + work_info.get("Job Role"), + ) + job_role_obj = existing_job_roles.get(job_role_key) + work_type_obj = existing_work_types.get(work_info.get("Work Type")) employee_type_obj = existing_employee_types.get(work_info.get("Employee Type")) shift_obj = existing_shifts.get(work_info.get("Shift")) @@ -520,8 +747,8 @@ def bulk_create_work_info_import(success_lists): # Parsing dates and salary date_joining = ( - work_info["Date joining"] - if not pd.isnull(work_info["Date joining"]) + work_info["Date Joining"] + if not pd.isnull(work_info["Date Joining"]) else datetime.today() ) diff --git a/employee/static/employee/actions.js b/employee/static/employee/actions.js index d71409d03..98c4fe4da 100644 --- a/employee/static/employee/actions.js +++ b/employee/static/employee/actions.js @@ -492,6 +492,7 @@ $("#deleteEmployees").click(function (e) { }).then(function (result) { if (result.isConfirmed) { e.preventDefault(); + $("#view-container").html(`
`); ids = []; ids.push($("#selectedInstances").attr("data-ids")); diff --git a/employee/static/employee/importExport.js b/employee/static/employee/importExport.js index 944aee072..190f82084 100644 --- a/employee/static/employee/importExport.js +++ b/employee/static/employee/importExport.js @@ -1,321 +1,179 @@ var downloadMessages = { - ar: "هل ترغب في تنزيل القالب؟", - de: "Möchten Sie die Vorlage herunterladen?", - es: "¿Quieres descargar la plantilla?", - en: "Do you want to download the template?", - fr: "Voulez-vous télécharger le modèle ?", + ar: "هل ترغب في تنزيل القالب؟", + de: "Möchten Sie die Vorlage herunterladen?", + es: "¿Quieres descargar la plantilla?", + en: "Do you want to download the template?", + fr: "Voulez-vous télécharger le modèle ?", }; var importSuccess = { - ar: "نجح الاستيراد", // Arabic - de: "Import erfolgreich", // German - es: "Importado con éxito", // Spanish - en: "Imported Successfully!", // English - fr: "Importation réussie", // French + ar: "نجح الاستيراد", // Arabic + de: "Import erfolgreich", // German + es: "Importado con éxito", // Spanish + en: "Imported Successfully!", // English + fr: "Importation réussie", // French }; var uploadSuccess = { - ar: "تحميل كامل", // Arabic - de: "Upload abgeschlossen", // German - es: "Carga completa", // Spanish - en: "Upload Complete!", // English - fr: "Téléchargement terminé", // French + ar: "تحميل كامل", // Arabic + de: "Upload abgeschlossen", // German + es: "Carga completa", // Spanish + en: "Upload Complete!", // English + fr: "Téléchargement terminé", // French }; var uploadingMessage = { - ar: "جارٍ الرفع", - de: "Hochladen...", - es: "Subiendo...", - en: "Uploading...", - fr: "Téléchargement en cours...", + ar: "جارٍ الرفع", + de: "Hochladen...", + es: "Subiendo...", + en: "Uploading...", + fr: "Téléchargement en cours...", }; var validationMessage = { - ar: "يرجى تحميل ملف بامتداد .xlsx فقط.", - de: "Bitte laden Sie nur eine Datei mit der Erweiterung .xlsx hoch.", - es: "Por favor, suba un archivo con la extensión .xlsx solamente.", - en: "Please upload a file with the .xlsx extension only.", - fr: "Veuillez télécharger uniquement un fichier avec l'extension .xlsx.", + ar: "يرجى تحميل ملف بامتداد .xlsx فقط.", + de: "Bitte laden Sie nur eine Datei mit der Erweiterung .xlsx hoch.", + es: "Por favor, suba un archivo con la extensión .xlsx solamente.", + en: "Please upload a file with the .xlsx extension only.", + fr: "Veuillez télécharger uniquement un fichier avec l'extension .xlsx.", }; function getCookie(name) { - let cookieValue = null; - if (document.cookie && document.cookie !== "") { - const cookies = document.cookie.split(";"); - for (let i = 0; i < cookies.length; i++) { - const cookie = cookies[i].trim(); - // Does this cookie string begin with the name we want? - if (cookie.substring(0, name.length + 1) === name + "=") { - cookieValue = decodeURIComponent(cookie.substring(name.length + 1)); - break; - } + let cookieValue = null; + if (document.cookie && document.cookie !== "") { + const cookies = document.cookie.split(";"); + for (let i = 0; i < cookies.length; i++) { + const cookie = cookies[i].trim(); + // Does this cookie string begin with the name we want? 
+ if (cookie.substring(0, name.length + 1) === name + "=") { + cookieValue = decodeURIComponent(cookie.substring(name.length + 1)); + break; + } + } } - } - return cookieValue; + return cookieValue; } function getCurrentLanguageCode(callback) { - var languageCode = $("#main-section-data").attr("data-lang"); - var allowedLanguageCodes = ["ar", "de", "es", "en", "fr"]; - if (allowedLanguageCodes.includes(languageCode)) { - callback(languageCode); - } else { - $.ajax({ - type: "GET", - url: "/employee/get-language-code/", - success: function (response) { - var ajaxLanguageCode = response.language_code; - $("#main-section-data").attr("data-lang", ajaxLanguageCode); - callback( - allowedLanguageCodes.includes(ajaxLanguageCode) - ? ajaxLanguageCode - : "en" - ); - }, - error: function () { - callback("en"); - }, - }); - } -} - -// Get the form element -var form = document.getElementById("workInfoImportForm"); - -// Add an event listener to the form submission -form.addEventListener("submit", function (event) { - // Prevent the default form submission - event.preventDefault(); - - // Create a new form data object - $(".oh-dropdown__import-form").css("display", "none"); - $("#uploading").css("display", "block"); - var formData = new FormData(); - - // Append the file to the form data object - var fileInput = document.querySelector("#workInfoImportFile"); - formData.append("file", fileInput.files[0]); - $.ajax({ - type: "POST", - url: "/employee/work-info-import", - dataType: "binary", - data: formData, - processData: false, - contentType: false, - headers: { - "X-CSRFToken": getCookie("csrftoken"), - }, - xhrFields: { - responseType: "blob", - }, - success: function (response, textStatus, xhr) { - var errorCount = xhr.getResponseHeader('X-Error-Count'); - if (typeof response === 'object' && response.type == 'application/json') { - var reader = new FileReader(); - - reader.onload = function() { - var json = JSON.parse(reader.result); - - if(json.success_count > 0) { - Swal.fire({ - text: `${json.success_count} Employees Imported Successfully`, - icon: "success", - showConfirmButton: false, - timer: 3000, - timerProgressBar: true, - }).then(function() { - window.location.reload(); - }); - } - } - reader.readAsText(response); - return; - } - if (!$(".file-xlsx-validation").length) { - swal.fire({ - text: `You have ${errorCount} errors. 
Do you want to download the error list?`, - icon: "error", - showCancelButton: true, - showDenyButton: true, - confirmButtonText: "Download error list & Skip Import", - denyButtonText: "Downlod error list & Continue Import", - cancelButtonText: "Cancel", - confirmButtonColor: "#d33", - denyButtonColor: "#008000", - customClass: { - container: 'custom-swal-container' - } - }) - .then((result) => { - if (result.isConfirmed) { - const file = new Blob([response], { - type: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", - }); - const url = URL.createObjectURL(file); - const link = document.createElement("a"); - link.href = url; - link.download = "ImportError.xlsx"; - document.body.appendChild(link); - link.click(); - window.location.reload(); - } - else if (result.isDenied) { - formData.append("create_work_info", true); - $.ajax({ - type: "POST", - url: "/employee/work-info-import", - dataType: "binary", - data: formData, - processData: false, - contentType: false, - headers: { - "X-CSRFToken": getCookie("csrftoken"), - }, - xhrFields: { - responseType: "blob", - }, - success: function (response, textStatus, xhr) { - Swal.fire({ - text: `Employees Imported Successfully`, - icon: "success", - showConfirmButton: false, - timer: 3000, - timerProgressBar: true, - }).then(function() { - const file = new Blob([response], { - type: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", - }); - const url = URL.createObjectURL(file); - const link = document.createElement("a"); - link.href = url; - link.download = "ImportError.xlsx"; - document.body.appendChild(link); - link.click(); - window.location.reload(); - }); - - return; - } - }) - } - else { - $(".oh-dropdown__import-form").css("display", "block"); - $("#uploading").css("display", "none"); - } + var languageCode = $("#main-section-data").attr("data-lang"); + var allowedLanguageCodes = ["ar", "de", "es", "en", "fr"]; + if (allowedLanguageCodes.includes(languageCode)) { + callback(languageCode); + } else { + $.ajax({ + type: "GET", + url: "/employee/get-language-code/", + success: function (response) { + var ajaxLanguageCode = response.language_code; + $("#main-section-data").attr("data-lang", ajaxLanguageCode); + callback( + allowedLanguageCodes.includes(ajaxLanguageCode) + ? 
ajaxLanguageCode + : "en" + ); + }, + error: function () { + callback("en"); + }, }); - } - - }, - error: function (xhr, textStatus, errorThrown) { - console.error("Error downloading file:", errorThrown); - }, - }); -}); + } +} function template_download(e) { - e.preventDefault(); - var languageCode = null; - getCurrentLanguageCode(function (code) { - languageCode = code; - var confirmMessage = downloadMessages[languageCode]; - Swal.fire({ - text: confirmMessage, - icon: "question", - showCancelButton: true, - confirmButtonColor: "#008000", - cancelButtonColor: "#d33", - confirmButtonText: "Confirm", - }).then(function (result) { - if (result.isConfirmed) { - $("#loading").show(); + e.preventDefault(); + var languageCode = null; + getCurrentLanguageCode(function (code) { + languageCode = code; + var confirmMessage = downloadMessages[languageCode]; + Swal.fire({ + text: confirmMessage, + icon: "question", + showCancelButton: true, + confirmButtonColor: "#008000", + cancelButtonColor: "#d33", + confirmButtonText: "Confirm", + }).then(function (result) { + if (result.isConfirmed) { + $("#loading").show(); - var xhr = new XMLHttpRequest(); - xhr.open("GET", "/employee/work-info-import", true); - xhr.responseType = "arraybuffer"; + var xhr = new XMLHttpRequest(); + xhr.open("GET", "/employee/work-info-import-file", true); + xhr.responseType = "arraybuffer"; - xhr.upload.onprogress = function (e) { - if (e.lengthComputable) { - var percent = (e.loaded / e.total) * 100; - $(".progress-bar") - .width(percent + "%") - .attr("aria-valuenow", percent); - $("#progress-text").text( - "Uploading... " + percent.toFixed(2) + "%" - ); - } - }; + xhr.upload.onprogress = function (e) { + if (e.lengthComputable) { + var percent = (e.loaded / e.total) * 100; + $(".progress-bar") + .width(percent + "%") + .attr("aria-valuenow", percent); + $("#progress-text").text( + "Uploading... 
" + percent.toFixed(2) + "%" + ); + } + }; - xhr.onload = function (e) { - if (this.status == 200) { - const file = new Blob([this.response], { - type: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", - }); - const url = URL.createObjectURL(file); - const link = document.createElement("a"); - link.href = url; - link.download = "work_info_template.xlsx"; - document.body.appendChild(link); - link.click(); - } - }; + xhr.onload = function (e) { + if (this.status == 200) { + const file = new Blob([this.response], { + type: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + }); + const url = URL.createObjectURL(file); + const link = document.createElement("a"); + link.href = url; + link.download = "work_info_template.xlsx"; + document.body.appendChild(link); + link.click(); + } + }; - xhr.onerror = function (e) { - console.error("Error downloading file:", e); - }; - xhr.send(); - } + xhr.onerror = function (e) { + console.error("Error downloading file:", e); + }; + xhr.send(); + } + }); }); - }); } -$("#work-info-import-download").click(function (e) { - template_download(e); // Pass the event to the function -}); - -$("#work-info-import").click(function (e) { - template_download(e); // Pass the event to the function -}); - - $(document).ajaxStart(function () { - $("#loading").show(); + $("#loading").show(); }); $(document).ajaxStop(function () { - $("#loading").hide(); + $("#loading").hide(); }); function simulateProgress() { - getCurrentLanguageCode(function (code) { - let progressBar = document.querySelector(".progress-bar"); - let progressText = document.getElementById("progress-text"); + getCurrentLanguageCode(function (code) { + let progressBar = document.querySelector(".progress-bar"); + let progressText = document.getElementById("progress-text"); - let width = 0; - let interval = setInterval(function () { - if (width >= 100) { - clearInterval(interval); - progressText.innerText = uploadMessage; - setTimeout(function () { - document.getElementById("loading").style.display = "none"; - }, 3000); - Swal.fire({ - text: importMessage, - icon: "success", - showConfirmButton: false, - timer: 2000, - timerProgressBar: true, - }); - setTimeout(function () { - $("#workInfoImport").removeClass("oh-modal--show"); - location.reload(true); - }, 2000); - } else { - width++; - progressBar.style.width = width + "%"; - progressBar.setAttribute("aria-valuenow", width); - progressText.innerText = uploadingMessage[languageCode] + width + "%"; - } - }, 20); - }); + let width = 0; + let interval = setInterval(function () { + if (width >= 100) { + clearInterval(interval); + progressText.innerText = uploadMessage; + setTimeout(function () { + document.getElementById("loading").style.display = "none"; + }, 3000); + Swal.fire({ + text: importMessage, + icon: "success", + showConfirmButton: false, + timer: 2000, + timerProgressBar: true, + }); + setTimeout(function () { + $("#workInfoImport").removeClass("oh-modal--show"); + location.reload(true); + }, 2000); + } else { + width++; + progressBar.style.width = width + "%"; + progressBar.setAttribute("aria-valuenow", width); + progressText.innerText = uploadingMessage[languageCode] + width + "%"; + } + }, 20); + }); } diff --git a/employee/templates/employee/employee_import.html b/employee/templates/employee/employee_import.html new file mode 100644 index 000000000..d02898aab --- /dev/null +++ b/employee/templates/employee/employee_import.html @@ -0,0 +1,49 @@ +{% load i18n %} +
+

+ {% trans "Import Employee" %} +

+ +
+
+ {% csrf_token %} + + +
+ + + +
+
+
\ No newline at end of file diff --git a/employee/templates/employee_nav.html b/employee/templates/employee_nav.html index 0a8d7d0a3..4a459e5f1 100644 --- a/employee/templates/employee_nav.html +++ b/employee/templates/employee_nav.html @@ -1,578 +1,389 @@ {% load static %} {% load i18n %} -{% if perms.employee.add_employee %} - - - -{% endif %} {% if perms.employee.change_employee %} - - - - {% endif %} - {% if perms.employee.change_employee or perms.employee.add_employee or perms.employee.delete_employee %} -
-
- - -
-
- {% endif %} - {% if perms.employee.add_employee %} -
- -
- {% endif %} - - - + + - + diff --git a/employee/urls.py b/employee/urls.py index 1bf9e61e0..0c51a6b63 100644 --- a/employee/urls.py +++ b/employee/urls.py @@ -168,6 +168,11 @@ urlpatterns = [ path("employee-import", views.employee_import, name="employee-import"), path("employee-export", views.employee_export, name="employee-export"), path("work-info-import", views.work_info_import, name="work-info-import"), + path( + "work-info-import-file", + views.work_info_import_file, + name="work-info-import-file", + ), path("work-info-export", views.work_info_export, name="work-info-export"), path("get-birthday", views.get_employees_birthday, name="get-birthday"), path("dashboard", views.dashboard, name="dashboard"), diff --git a/employee/views.py b/employee/views.py index dcd4410ac..476797105 100755 --- a/employee/views.py +++ b/employee/views.py @@ -16,7 +16,6 @@ import calendar import json import operator import os -import re import threading from datetime import date, datetime, timedelta from urllib.parse import parse_qs @@ -28,12 +27,13 @@ from django.contrib import messages from django.contrib.auth.models import User from django.core.cache import cache from django.core.exceptions import ObjectDoesNotExist -from django.db import models, transaction +from django.db import models from django.db.models import F, ProtectedError from django.db.models.query import QuerySet from django.forms import DateInput, Select -from django.http import HttpResponse, HttpResponseRedirect, JsonResponse, QueryDict +from django.http import HttpResponse, HttpResponseRedirect, JsonResponse from django.shortcuts import get_object_or_404, redirect, render +from django.template.loader import render_to_string from django.urls import reverse from django.utils import timezone from django.utils.translation import gettext as __ @@ -57,17 +57,14 @@ from base.models import ( Company, Department, EmailLog, - EmployeeShift, - EmployeeType, JobPosition, JobRole, RotatingShiftAssign, RotatingWorkTypeAssign, ShiftRequest, - WorkType, WorkTypeRequest, - clear_messages, ) +from base.views import generate_error_report from employee.filters import DocumentRequestFilter, EmployeeFilter, EmployeeReGroup from employee.forms import ( BonusPointAddForm, @@ -94,9 +91,11 @@ from employee.methods.methods import ( bulk_create_user_import, bulk_create_work_info_import, bulk_create_work_types, - convert_nan, + error_data_template, get_ordered_badge_ids, + process_employee_records, set_initial_password, + valid_import_file_headers, ) from employee.models import ( BonusPoint, @@ -191,7 +190,6 @@ def _check_reporting_manager(request, *args, **kwargs): return request.user.employee_get.reporting_manager.exists() -# Create your views here. 
@login_required def get_language_code(request): """ @@ -1885,11 +1883,13 @@ def employee_bulk_delete(request): """ This method is used to delete set of Employee instances """ - ids = request.POST["ids"] - ids = json.loads(ids) - for employee_id in ids: + ids = json.loads(request.POST.get("ids", "[]")) + if not ids: + messages.error(request, _("No IDs provided.")) + deleted_count = 0 + employees = Employee.objects.filter(id__in=ids).select_related("employee_user_id") + for employee in employees: try: - employee = Employee.objects.get(id=employee_id) if apps.is_installed("payroll"): if employee.contract_set.all().exists(): contracts = employee.contract_set.all() @@ -1898,16 +1898,19 @@ def employee_bulk_delete(request): contract.delete() user = employee.employee_user_id user.delete() - messages.success( - request, _("%(employee)s deleted.") % {"employee": employee} - ) + deleted_count += 1 except Employee.DoesNotExist: messages.error(request, _("Employee not found.")) except ProtectedError: messages.error( request, _("You cannot delete %(employee)s.") % {"employee": employee} ) - + if deleted_count > 0: + messages.success( + request, + _("%(deleted_count)s employees deleted.") + % {"deleted_count": deleted_count}, + ) return JsonResponse({"message": "Success"}) @@ -2419,19 +2422,19 @@ def convert_nan(field, dicts): try: float(field_value) return None - except ValueError: + except (ValueError, TypeError): return field_value @login_required @permission_required("employee.add_employee") -def work_info_import(request): +def work_info_import_file(request): """ - This method is used to import Employee instances and creates related objects + This method is used to return the excel file of import Employee instances """ data_frame = pd.DataFrame( columns=[ - "Badge id", + "Badge ID", "First Name", "Last Name", "Phone", @@ -2446,234 +2449,121 @@ def work_info_import(request): "Reporting Manager", "Company", "Location", - "Date joining", + "Date Joining", "Contract End Date", "Basic Salary", "Salary Hour", ] ) - error_data = { - "Badge id": [], - "First Name": [], - "Last Name": [], - "Phone": [], - "Email": [], - "Gender": [], - "Department": [], - "Job Position": [], - "Job Role": [], - "Work Type": [], - "Shift": [], - "Employee Type": [], - "Reporting Manager": [], - "Company": [], - "Location": [], - "Date joining": [], - "Contract End Date": [], - "Basic Salary": [], - "Salary Hour": [], - "Email Error": [], - "First Name error": [], - "Name and Email Error": [], - "Phone error": [], - "Joining Date Error": [], - "Contract Error": [], - "Badge ID Error": [], - "Basic Salary Error": [], - "Salary Hour Error": [], - "User ID Error": [], - } - # Export the DataFrame to an Excel file response = HttpResponse(content_type="application/ms-excel") response["Content-Disposition"] = 'attachment; filename="work_info_template.xlsx"' data_frame.to_excel(response, index=False) - create_work_info = False - if request.POST.get("create_work_info") == "true": - create_work_info = True - - if request.method == "POST" and request.FILES.get("file") is not None: - total_count = 0 - error_lists = [] - success_lists = [] - error_occured = False - file = request.FILES["file"] - file_extension = file.name.split(".")[-1].lower() - data_frame = ( - pd.read_csv(file) if file_extension == "csv" else pd.read_excel(file) - ) - work_info_dicts = data_frame.to_dict("records") - existing_badge_ids = set(Employee.objects.values_list("badge_id", flat=True)) - existing_usernames = set(User.objects.values_list("username", flat=True)) 
- existing_name_emails = set( - Employee.objects.values_list( - "employee_first_name", "employee_last_name", "email" - ) - ) - users = [] - for work_info in work_info_dicts: - error = False - try: - email = work_info["Email"] - phone = work_info["Phone"] - first_name = convert_nan("First Name", work_info) - last_name = convert_nan("Last Name", work_info) - badge_id = work_info["Badge id"] - date_joining = work_info["Date joining"] - contract_end_date = work_info["Contract End Date"] - basic_salary = convert_nan("Basic Salary", work_info) - salary_hour = convert_nan("Salary Hour", work_info) - pattern = r"^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$" - - try: - if pd.isna(email) or not re.match(pattern, email): - work_info["Email Error"] = f"Invalid Email address" - error = True - except: - error = True - work_info["Email Error"] = f"Invalid Email address" - - try: - pd.to_numeric(basic_salary) - except ValueError: - work_info["Basic Salary Error"] = f"Basic Salary must be a number" - error = True - - try: - pd.to_numeric(salary_hour) - except ValueError: - work_info["Salary Hour Error"] = f"Salary Hour must be a number" - error = True - - if pd.isna(first_name): - work_info["First Name error"] = f"First Name can't be empty" - error = True - - if pd.isna(phone): - work_info["Phone error"] = f"Phone Number can't be empty" - error = True - - name_email_tuple = (first_name, last_name, email) - if name_email_tuple in existing_name_emails: - work_info["Name and Email Error"] = ( - "An employee with this first name, last name, and email already exists." - ) - error = True - else: - existing_name_emails.add(name_email_tuple) - - try: - pd.to_datetime(date_joining).date() - except: - work_info["Joining Date Error"] = ( - f"Invalid Date format. Please use the format YYYY-MM-DD" - ) - error = True - - try: - pd.to_datetime(contract_end_date).date() - except: - work_info["Contract Error"] = ( - f"Invalid Date format. 
Please use the format YYYY-MM-DD" - ) - error = True - - if badge_id in existing_badge_ids: - work_info["Badge ID Error"] = ( - f"An Employee with the badge ID already exists" - ) - error = True - else: - existing_badge_ids.add(badge_id) - - if email in existing_usernames: - work_info["User ID Error"] = ( - f"User with the email ID already exists" - ) - error = True - else: - existing_usernames.add(email) - - if error: - error_lists.append(work_info) - else: - success_lists.append(work_info) - - except Exception as e: - error_occured = True - logger.error(e) - - if create_work_info or not error_lists: - try: - users = bulk_create_user_import(success_lists) - employees = bulk_create_employee_import(success_lists) - thread = threading.Thread( - target=set_initial_password, args=(employees,) - ) - thread.start() - - total_count = len(employees) - bulk_create_department_import(success_lists) - bulk_create_job_position_import(success_lists) - bulk_create_job_role_import(success_lists) - bulk_create_work_types(success_lists) - bulk_create_shifts(success_lists) - bulk_create_employee_types(success_lists) - bulk_create_work_info_import(success_lists) - - except Exception as e: - error_occured = True - logger.error(e) - - if error_occured: - messages.error(request, "something went wrong....") - data_frame = pd.DataFrame( - ["The provided titles don't match the default titles."], - columns=["Title Error"], - ) - - error_count = len(error_lists) - # Create an HTTP response object with the Excel file - response = HttpResponse(content_type="application/ms-excel") - response["Content-Disposition"] = 'attachment; filename="ImportError.xlsx"' - data_frame.to_excel(response, index=False) - response["X-Error-Count"] = error_count - return response - - if error_lists: - for item in error_lists: - for key, value in error_data.items(): - if key in item: - value.append(item[key]) - else: - value.append(None) - - keys_to_remove = [ - key - for key, value in error_data.items() - if all(v is None for v in value) - ] - - for key in keys_to_remove: - del error_data[key] - data_frame = pd.DataFrame(error_data, columns=error_data.keys()) - error_count = len(error_lists) - # Create an HTTP response object with the Excel file - response = HttpResponse(content_type="application/ms-excel") - response["Content-Disposition"] = 'attachment; filename="ImportError.xlsx"' - data_frame.to_excel(response, index=False) - response["X-Error-Count"] = error_count - return response - return JsonResponse( - { - "Success": "Employees Imported Succefully", - "success_count": total_count, - } - ) - return response +@login_required +@hx_request_required +@permission_required("employee.add_employee") +def work_info_import(request): + if request.method == "GET": + return render(request, "employee/employee_import.html") + + if request.method == "POST": + file = request.FILES.get("file") + if not file: + error_message = _("No file uploaded.") + return render( + request, + "employee/employee_import.html", + {"error_message": error_message}, + ) + + file_extension = file.name.split(".")[-1].lower() + + try: + if file_extension == "csv": + data_frame = pd.read_csv(file) + elif file_extension in ["xls", "xlsx"]: + data_frame = pd.read_excel(file) + else: + + error_message = _( + "Unsupported file format. Please upload a CSV or Excel file." 
+ ) + return render( + request, + "employee/employee_import.html", + {"error_message": error_message}, + ) + + valid, error_message = valid_import_file_headers(data_frame) + if not valid: + return render( + request, + "employee/employee_import.html", + {"error_message": error_message}, + ) + success_list, error_list, created_count = process_employee_records( + data_frame + ) + if success_list: + try: + users = bulk_create_user_import(success_list) + employees = bulk_create_employee_import(success_list) + thread = threading.Thread( + target=set_initial_password, args=(employees,) + ) + thread.start() + + bulk_create_department_import(success_list) + bulk_create_job_position_import(success_list) + bulk_create_job_role_import(success_list) + bulk_create_work_types(success_list) + bulk_create_shifts(success_list) + bulk_create_employee_types(success_list) + bulk_create_work_info_import(success_list) + + except Exception as e: + messages.error(request, _("Error Occured {}").format(e)) + logger.error(e) + + path_info = ( + generate_error_report( + error_list, error_data_template, "EmployeesImportError.xlsx" + ) + if error_list + else None + ) + + context = { + "created_count": created_count, + "total_count": created_count + len(error_list), + "error_count": len(error_list), + "model": _("Employees"), + "path_info": path_info, + } + result = render_to_string("import_popup.html", context) + result += """ + + """ + return HttpResponse(result) + except Exception as e: + messages.error( + request, + _( + "Failed to read file. Please ensure it is a valid CSV or Excel file. : {}" + ).format(e), + ) + logger.error(f"File import error: {e}") + error_message = f"File import error: {e}" + return render( + request, "employee/employee_import.html", {"error_message": error_message} + ) + + @login_required @manager_can_enter("employee.view_employee") def work_info_export(request): @@ -2686,9 +2576,19 @@ def work_info_export(request): "export_form": EmployeeExportExcelForm(), } return render(request, "employee_export_filter.html", context) + employees_data = {} selected_columns = [] form = EmployeeExportExcelForm() + field_overrides = { + "employee_work_info__department_id": "employee_work_info__department_id__department", + "employee_work_info__job_position_id": "employee_work_info__job_position_id__job_position", + "employee_work_info__job_role_id": "employee_work_info__job_role_id__job_role", + "employee_work_info__shift_id": "employee_work_info__shift_id__employee_shift", + "employee_work_info__work_type_id": "employee_work_info__work_type_id__work_type", + "employee_work_info__reporting_manager_id": "employee_work_info__reporting_manager_id__get_full_name", + "employee_work_info__employee_type_id": "employee_work_info__employee_type_id__employee_type", + } employees = EmployeeFilter(request.GET).qs employees = filtersubordinatesemployeemodel( request, employees, "employee.view_employee" @@ -2699,54 +2599,61 @@ def work_info_export(request): ids = request.GET.get("ids") id_list = json.loads(ids) employees = Employee.objects.filter(id__in=id_list) - for field in excel_columns: - value = field[0] - key = field[1] + + prefetch_fields = list(set(f.split("__")[0] for f in selected_fields if "__" in f)) + if prefetch_fields: + employees = employees.select_related(*prefetch_fields) + + for value, key in excel_columns: if value in selected_fields: selected_columns.append((value, key)) - for column_value, column_name in selected_columns: - nested_attributes = column_value.split("__") - employees_data[column_name] = 
[] - for employee in employees: + + date_format = "YYYY-MM-DD" + user = request.user + emp = getattr(user, "employee_get", None) + if emp: + info = EmployeeWorkInformation.objects.filter(employee_id=emp).first() + if info: + company = Company.objects.filter(company=info.company_id).first() + if company and company.date_format: + date_format = company.date_format + + employees_data = {column_name: [] for _, column_name in selected_columns} + for employee in employees: + for column_value, column_name in selected_columns: + if column_value in field_overrides: + column_value = field_overrides[column_value] + + nested_attrs = column_value.split("__") value = employee - for attr in nested_attributes: + for attr in nested_attrs: value = getattr(value, attr, None) if value is None: break + + # Call the value if it's employee_work_info__reporting_manager_id__get_full_name + if callable(value): + try: + value = value() + except Exception: + value = "" + data = str(value) if value is not None else "" - if type(value) == date: - user = request.user - emp = user.employee_get - - # Taking the company_name of the user - info = EmployeeWorkInformation.objects.filter(employee_id=emp) - if info.exists(): - for i in info: - employee_company = i.company_id - company_name = Company.objects.filter(company=employee_company) - emp_company = company_name.first() - - # Access the date_format attribute directly - date_format = ( - emp_company.date_format if emp_company else "MMM. D, YYYY" + if isinstance(value, date): + try: + data = value.strftime( + HORILLA_DATE_FORMATS.get(date_format, "%Y-%m-%d") ) - else: - date_format = "MMM. D, YYYY" - # Convert the string to a datetime.date object - start_date = datetime.strptime(str(value), "%Y-%m-%d").date() - - # Print the formatted date for each format - for format_name, format_string in HORILLA_DATE_FORMATS.items(): - if format_name == date_format: - data = start_date.strftime(format_string) + except Exception: + data = str(value) if data == "True": data = _("Yes") elif data == "False": data = _("No") - employees_data[column_name].append(data) + employees_data[column_name].append(data) data_frame = pd.DataFrame(data=employees_data) response = HttpResponse(content_type="application/ms-excel") response["Content-Disposition"] = 'attachment; filename="employee_export.xlsx"' @@ -2995,7 +2902,6 @@ def employee_select_filter(request): request.GET, queryset=Employee.objects.filter() ) - # Get the filtered queryset filtered_employees = filtersubordinatesemployeemodel( request=request, queryset=employee_filter.qs, perm="employee.view_employee" ) @@ -3055,7 +2961,6 @@ def add_note(request, emp_id=None): note.save() note.note_files.set(attachment_ids) messages.success(request, _("Note added successfully..")) - response = render(request, "tabs/add_note.html", {"form": form}) return redirect(f"/employee/note-tab/{emp_id}") employee_obj = Employee.objects.get(id=emp_id)
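
Reviewer note: below is a minimal, standalone sketch of the two parsing helpers this patch adds to employee/methods/methods.py (normalize_phone and import_valid_date), plus a small usage example. It mirrors the hunks above but omits the pandas NaN guard and the Django machinery, so treat it as illustrative rather than the shipped code.

import re
from datetime import datetime


def normalize_phone(phone):
    # Keep a leading "+" and drop every other non-digit character.
    phone = str(phone).strip()
    if phone.startswith("+"):
        return "+" + re.sub(r"\D", "", phone[1:])
    return re.sub(r"\D", "", phone)


def import_valid_date(date_value, field_label, errors_dict, error_key):
    # Parse a cell using the formats the importer accepts; on failure,
    # record a message under error_key and return None.
    if date_value is None or str(date_value).strip() == "":
        return None
    if isinstance(date_value, datetime):
        return date_value.date()
    for fmt in ("%Y-%m-%d", "%d/%m/%Y", "%m/%d/%Y"):
        try:
            return datetime.strptime(str(date_value).strip(), fmt).date()
        except ValueError:
            continue
    errors_dict[error_key] = (
        f"{field_label} is not a valid date. Expected formats: YYYY-MM-DD, DD/MM/YYYY"
    )
    return None


if __name__ == "__main__":
    errors = {}
    print(normalize_phone("+91 (484) 123-4567"))  # -> +914841234567
    print(import_valid_date("2024-02-29", "Joining Date", errors, "Joining Date Error"))  # -> 2024-02-29
    print(import_valid_date("31-12-2024", "Joining Date", errors, "Joining Date Error"))  # -> None
    print(errors)  # unsupported "DD-MM-YYYY" value is reported here

Collecting failures in a per-row dict keyed by the "... Error" column names is what lets process_employee_records merge them into the row via emp.update(errors) and hand the error rows, together with error_data_template, to generate_error_report for the downloadable EmployeesImportError.xlsx workbook.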