Commit bc81c34
Mallisetty Siva Mahesh committed
Parent(s): c04d620

updated s3 bucket
Files changed:
- app.py +5 -1
- folder.py +21 -0
- s3_setup.py +75 -24
app.py
CHANGED
@@ -178,7 +178,11 @@ def perform_inference(file_paths: Dict[str, str], upload_to_s3: bool):
     if upload_to_s3:
         client = s3_client()
         bucket_name = "edgekycdocs"
-
+        if doc_type == "cin_llpin":
+            folder_name = f"{doc_type.replace('_', '')}docs"
+        else:
+            folder_name = f"{doc_type.split('_')[0]}docs"
+
         file_name = unprocessed_file_path.split("/")[-1].replace(" ", "_")

         try:
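For context, the new branch derives the S3 folder from the document type: "cin_llpin" keeps both name parts, any other type keeps only its first segment. A minimal sketch of that mapping (doc_type values other than "cin_llpin" are illustrative assumptions, not taken from the commit):

    def s3_folder_name(doc_type: str) -> str:
        # Same rule as the hunk above: "cin_llpin" -> "cinllpindocs",
        # every other doc_type keeps only its first underscore-separated part.
        if doc_type == "cin_llpin":
            return f"{doc_type.replace('_', '')}docs"
        return f"{doc_type.split('_')[0]}docs"

    print(s3_folder_name("cin_llpin"))   # cinllpindocs
    print(s3_folder_name("pan_card"))    # pandocs (hypothetical doc_type)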
folder.py
ADDED
@@ -0,0 +1,21 @@
+
+import boto3
+# AWS credentials (if not set in environment variables or AWS CLI config)
+from dotenv import load_dotenv
+import os
+
+# Load .env file
+load_dotenv()
+# Access variables
+AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
+AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
+print("AWS_ACCESS_KEY_ID",AWS_ACCESS_KEY_ID)
+print("AWS_SECRET_ACCESS_KEY",AWS_SECRET_ACCESS_KEY)
+# Initialize S3 client
+
+s3 = boto3.client('s3')
+bucket_name = "edgekycdocs"
+folder_name = "cinllpindocs"
+
+s3.put_object(Bucket=bucket_name, Key=folder_name)
+print(f"Folder '{folder_name}' created in bucket '{bucket_name}'")
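Worth noting for this new script: S3 has no real folders, so put_object with a plain key as above creates a single zero-byte object named "cinllpindocs". A minimal sketch of the common convention of using a trailing slash so the key behaves as a folder prefix in the console (an assumption about intent, not what the commit does):

    import boto3

    s3 = boto3.client("s3")
    # A key ending in "/" shows up as an empty folder in the S3 console;
    # later uploads can then use keys like "cinllpindocs/<file_name>".
    s3.put_object(Bucket="edgekycdocs", Key="cinllpindocs/")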
s3_setup.py
CHANGED
@@ -1,44 +1,95 @@
 import boto3
+
 # AWS credentials (if not set in environment variables or AWS CLI config)
 from dotenv import load_dotenv
 import os
 import sys
-
 from utils import doc_processing
+import mimetypes
+from pathlib import Path
+from django.conf import settings
+
+BASE_DIR = Path(__file__).resolve().parent.parent.parent

 # Load .env file
 load_dotenv()
 # Access variables
 AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
 AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
-print("AWS_ACCESS_KEY_ID",AWS_ACCESS_KEY_ID)
-print("AWS_SECRET_ACCESS_KEY",AWS_SECRET_ACCESS_KEY)
+print("AWS_ACCESS_KEY_ID", AWS_ACCESS_KEY_ID)
+print("AWS_SECRET_ACCESS_KEY", AWS_SECRET_ACCESS_KEY)
 # Initialize S3 client

-class s3_client:
-    def __init__(self):
-        self.aws_access_key_id = AWS_ACCESS_KEY_ID
-        self.aws_secret_access_key = AWS_SECRET_ACCESS_KEY
-
-    def initialize(self):
-        return boto3.client(
-            's3',
-            aws_access_key_id=self.aws_access_key_id,
-            aws_secret_access_key=self.aws_secret_access_key
-        )
-
-    def upload_file(self,local_file_path, bucket_name,folder_name,file_name):
-        try:
-            client = self.initialize()
-            client.upload_file(local_file_path, bucket_name, f"{folder_name}/{file_name}")
-            print(f"File uploaded successfully to {bucket_name}/{folder_name}{file_name}")
-            url = f"https://edgekycdocs.s3.eu-north-1.amazonaws.com/{folder_name}/{file_name}"
-            print("file url",url)
-            return {"status": 200, "message":"file uploaded successfully" , "url" : url}
-        except Exception as e:
-            print("Error uploading file:", e)
-            return {"status": 400, "message":e}
-
-
-
-
+# class s3_client:
+#     def __init__(self):
+#         self.aws_access_key_id = AWS_ACCESS_KEY_ID
+#         self.aws_secret_access_key = AWS_SECRET_ACCESS_KEY
+
+#     def initialize(self):
+#         return boto3.client(
+#             's3',
+#             aws_access_key_id=self.aws_access_key_id,
+#             aws_secret_access_key=self.aws_secret_access_key
+#         )
+
+#     def upload_file(self,local_file_path, bucket_name,folder_name,file_name):
+#         try:
+#             client = self.initialize()
+#             client.upload_file(local_file_path, bucket_name, f"{folder_name}/{file_name}")
+#             print(f"File uploaded successfully to {bucket_name}/{folder_name}{file_name}")
+#             url = f"https://edgekycdocs.s3.eu-north-1.amazonaws.com/{folder_name}/{file_name}"
+#             print("file url",url)
+#             return {"status": 200, "message":"file uploaded successfully" , "url" : url}
+#         except Exception as e:
+#             print("Error uploading file:", e)
+#             return {"status": 400, "message":e}
+
+
+class S3Client:
+    def __init__(self):
+        self.aws_access_key_id = settings.AWS_ACCESS_KEY_ID
+        self.aws_secret_access_key = settings.AWS_SECRET_ACCESS_KEY
+        self.s3_client = boto3.client(
+            "s3",
+            aws_access_key_id=self.aws_access_key_id,
+            aws_secret_access_key=self.aws_secret_access_key,
+        )
+
+    def upload_file(self, local_file_path, bucket_name, folder_name, file_name):
+        try:
+            file_key = f"{folder_name}/{file_name}"
+
+            # Determine the MIME type dynamically
+            # Determine the correct MIME type
+            content_type, _ = mimetypes.guess_type(local_file_path)
+            if content_type is None:
+                content_type = "application/octet-stream"  # Default fallback
+
+            # Explicitly handle common file types
+            if file_name.lower().endswith(
+                (".pdf", ".jpg", ".jpeg", ".png", ".gif", ".webp")
+            ):
+                content_type = (
+                    mimetypes.guess_type(file_name)[0] or "application/octet-stream"
+                )
+
+            # Upload with correct Content-Type
+            self.s3_client.upload_file(
+                local_file_path,
+                bucket_name,
+                file_key,
+                ExtraArgs={
+                    "ContentDisposition": "inline",  # Ensure inline display
+                    "ContentType": content_type,
+                },
+            )
+
+            file_url = f"https://{bucket_name}.s3.amazonaws.com/{file_key}"
+            print(f"Uploading {file_name} with ContentType: {content_type}")
+            return {
+                "status": 200,
+                "message": "File uploaded successfully",
+                "url": file_url,
+            }
+        except Exception as e:
+            return {"status": 400, "message": str(e)}