diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs
index 603c5dbb53d61a0f9307a2a0fb5065e6447bb023..f97403a0eb408e5c10304905ed1821274919bbca 100644
--- a/src/api/core/ciphers.rs
+++ b/src/api/core/ciphers.rs
@@ -205,7 +205,7 @@ pub struct CipherData {
     */
     pub Type: i32,
     pub Name: String,
-    Notes: Option<String>,
+    pub Notes: Option<String>,
     Fields: Option<Value>,
 
     // Only one of these should exist, depending on type
@@ -542,6 +542,12 @@ async fn post_ciphers_import(
 
     let data: ImportData = data.into_inner().data;
 
+    // Validate the import before continuing.
+    // Bitwarden rejects the entire import if any single item is invalid.
+    // Since we enforce a maximum encrypted note length, we pre-validate all notes here.
+    // TODO: See if we can optimize cipher adding/importing to avoid this duplicated validation.
+    Cipher::validate_notes(&data.Ciphers)?;
+
     // Read and create the folders
     let mut folders: Vec<_> = Vec::new();
     for folder in data.Folders.into_iter() {
diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs
index dbfd8f02fa54ae2d024f76e99e8e9b9a3260fc36..d029cb60fb30e8fa9fde7f07ebc1545eaf60808f 100644
--- a/src/api/core/mod.rs
+++ b/src/api/core/mod.rs
@@ -7,7 +7,7 @@ mod organizations;
 mod sends;
 pub mod two_factor;
 
-pub use ciphers::{purge_trashed_ciphers, CipherSyncData, CipherSyncType};
+pub use ciphers::{purge_trashed_ciphers, CipherData, CipherSyncData, CipherSyncType};
 pub use emergency_access::{emergency_notification_reminder_job, emergency_request_timeout_job};
 pub use events::{event_cleanup_job, log_event, log_user_event};
 pub use sends::purge_sends;
diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs
index 003961501353724332740ab1faf73d3e74be8c7e..3fb83ae22d2a0ccbf55b102999f5645e4e5b53fb 100644
--- a/src/api/core/organizations.rs
+++ b/src/api/core/organizations.rs
@@ -1378,6 +1378,12 @@ async fn post_org_import(
     let data: ImportData = data.into_inner().data;
     let org_id = query.organization_id;
 
+    // Validate the import before continuing.
+    // Bitwarden rejects the entire import if any single item is invalid.
+    // Since we enforce a maximum encrypted note length, we pre-validate all notes here.
+    // TODO: See if we can optimize cipher adding/importing to avoid this duplicated validation.
+    Cipher::validate_notes(&data.Ciphers)?;
+
     let mut collections = Vec::new();
     for coll in data.Collections {
         let collection = Collection::new(org_id.clone(), coll.Name);
diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs
index da40af9087d79881fb9ef44609133909f5f2778a..b7d26bd3d7c0122526620af59d1285f66c0848da 100644
--- a/src/db/models/cipher.rs
+++ b/src/db/models/cipher.rs
@@ -6,7 +6,7 @@ use super::{
     Attachment, CollectionCipher, Favorite, FolderCipher, Group, User, UserOrgStatus, UserOrgType, UserOrganization,
 };
 
-use crate::api::core::CipherSyncData;
+use crate::api::core::{CipherData, CipherSyncData};
 
 use std::borrow::Cow;
 
@@ -73,6 +73,33 @@ impl Cipher {
             reprompt: None,
         }
     }
+
+    pub fn validate_notes(cipher_data: &[CipherData]) -> EmptyResult {
+        let mut validation_errors = serde_json::Map::new();
+        for (index, cipher) in cipher_data.iter().enumerate() {
+            if let Some(note) = &cipher.Notes {
+                if note.len() > 10_000 {
+                    validation_errors.insert(
+                        format!("Ciphers[{index}].Notes"),
+                        serde_json::to_value([
+                            "The field Notes exceeds the maximum encrypted value length of 10000 characters.",
+                        ])
+                        .unwrap(),
+                    );
+                }
+            }
+        }
+        if !validation_errors.is_empty() {
+            let err_json = json!({
+                "message": "The model state is invalid.",
+                "validationErrors" : validation_errors,
+                "object": "error"
+            });
+            err_json!(err_json, "Import validation errors")
+        } else {
+            Ok(())
+        }
+    }
 }
 
 use crate::db::DbConn;