Issue
I'm currently working on a Flask application that involves uploading files to Azure Blob Storage and then inserting data into a SQL Server database. However, I'm encountering an issue with the upload_file function. Here's my code:
The problem I'm encountering is that the file is not being uploaded. I've checked Azure Blob Storage, and the file is not there; there also seems to be an issue with inserting the data into the database.
# Database credentials (replace the placeholders with your own values;
# prefer loading secrets from environment variables over hard-coding them).
SERVER = 'your_server_name.database.windows.net'
DATABASE = 'your_database_name'
USERNAME = 'your_username'
PASSWORD = 'your_password'
DRIVER = 'ODBC Driver 17 for SQL Server'

# Creating the connection string. The triple braces emit the literal braces
# the SQL Server ODBC driver name requires: DRIVER={ODBC Driver 17 ...}.
connection_string = f'DRIVER={{{DRIVER}}};SERVER={SERVER};DATABASE={DATABASE};UID={USERNAME};PWD={PASSWORD}'

# Azure Blob Storage configuration
connect_str = ""  # Azure Storage account connection string goes here
container_name = 'files'
blob_service_client = BlobServiceClient.from_connection_string(connect_str)
Here is my main upload function; I'm having an issue with it when I try to upload and then insert into a table.
@app.route('/upload', methods=['POST'])
def upload_file():
    """Upload a file to Azure Blob Storage, then record it in the database.

    Non-``item_*`` form fields become one TrainData row (plus the blob URL
    and a server-side timestamp); ``item_<index>_<attr>`` fields are grouped
    by index into ItemDetails rows linked to the new TrainData id.
    Returns JSON with the blob URL on success.
    """
    file = request.files.get('file')
    if not file:
        return jsonify({'message': 'No file provided'}), 400

    blob_path = f"upload/trainfile/{file.filename}"
    blob_client = blob_service_client.get_blob_client(container=container_name, blob=blob_path)

    conn = None
    cursor = None
    try:
        if blob_client.exists():
            return jsonify({'message': 'File already exists'}), 409
        # Upload the raw stream, not the werkzeug FileStorage wrapper object.
        blob_client.upload_blob(file.stream)
        pdf_url = blob_client.url

        conn = pyodbc.connect(connection_string)
        cursor = conn.cursor()

        # SECURITY: the form *keys* become SQL identifiers and cannot be
        # parameterized -- validate them against a whitelist of real column
        # names before trusting client input here.
        main_fields = {key: request.form[key] for key in request.form if not key.startswith('item_')}
        columns = ', '.join(main_fields.keys())
        placeholders = ', '.join('?' * len(main_fields))

        # Run the INSERT and SCOPE_IDENTITY() in ONE batch: pyodbc sends each
        # execute() as its own batch, so a separate SCOPE_IDENTITY() call
        # returns NULL, and @@IDENTITY is unscoped (fooled by triggers).
        # SET NOCOUNT ON suppresses the rowcount result set so fetchval()
        # sees the identity value directly.
        main_sql = (
            "SET NOCOUNT ON; "
            f"INSERT INTO TrainData ({columns}, pdf_url, created_at) "
            f"VALUES ({placeholders}, ?, GETDATE()); "
            "SELECT SCOPE_IDENTITY() AS id;"
        )
        train_data_id = cursor.execute(main_sql, *(list(main_fields.values()) + [pdf_url])).fetchval()

        # Group item_<index>_<attr> form keys into one dict per index.
        grouped_item_data = {}
        for key in request.form:
            if not key.startswith('item_'):
                continue
            parts = key.split('_')
            index, item_attr = parts[1], '_'.join(parts[2:])
            grouped_item_data.setdefault(index, {})[item_attr] = request.form[key]

        for item in grouped_item_data.values():
            item_columns = ', '.join(item.keys())
            item_placeholders = ', '.join('?' * len(item))
            item_sql = f"INSERT INTO ItemDetails (train_data_id, {item_columns}) VALUES (?, {item_placeholders})"
            cursor.execute(item_sql, train_data_id, *item.values())

        conn.commit()
        return jsonify({'message': 'Data saved successfully', 'file_url': pdf_url}), 200
    except Exception as e:
        if conn:
            conn.rollback()
        # Print the error message to the console for debugging
        print("An error occurred:", e)
        return jsonify({'message': 'Failed to save data', 'error': str(e)}), 500
    finally:
        # Close the cursor on every path, not only on success.
        if cursor:
            cursor.close()
        if conn:
            conn.close()
I've also tried to use sql alchemy and it's not working with that either.
Solution
The code you shared has issues even once the file upload and the table insert are considered separately. Therefore, I have split the code into two endpoints, /insert_data
and /upload
, in the same file. I have added a try/except block around the file upload and made changes to the insert query.
The code inserts the value into the table and uploads the file to Azure Blob.
The code below inserts data into an Azure SQL Server database using Flask, PyODBC, and uploads files while saving the uploaded files to an Azure Storage Blob, handling some error scenarios.
I have changed the request body format of your code to raw JSON.
# Create the Flask application instance.
app = Flask(__name__)

# Replace these with your Azure SQL Server details
server = 'your_server_name.database.windows.net'
database = 'your_database_name'
username = 'your_username'
password = 'your_password'
driver = '{ODBC Driver 17 for SQL Server}'

# Establish a connection to the Azure SQL database
# (the f-string assembles the ODBC connection string from the pieces above)
connection_string = f'DRIVER={driver};SERVER={server};DATABASE={database};UID={username};PWD={password}'
# NOTE(review): a single module-level connection/cursor is shared by every
# request; pyodbc connections are not guaranteed safe for concurrent use --
# consider opening a connection per request instead. TODO confirm.
connection = pyodbc.connect(connection_string)
cursor = connection.cursor()
@app.route('/insert_data', methods=['POST'])
def insert_data():
    """Insert one TrainData row plus its linked ItemDetails rows.

    Expects a raw-JSON body of the form:
        {"field1": ..., "field2": ..., "pdf_url": ...,
         "item_details": [{"item_field1": ..., "item_field2": ...}, ...]}
    Returns 201 on success, 500 with the error message on failure.
    """
    try:
        data = request.json

        # The INSERT and SCOPE_IDENTITY() must run in the *same* batch:
        # pyodbc sends each execute() as its own batch, and SCOPE_IDENTITY()
        # returns NULL once the inserting batch has ended. SET NOCOUNT ON
        # suppresses the rowcount result set so fetchval() sees the id.
        train_data_id = cursor.execute(
            """
            SET NOCOUNT ON;
            INSERT INTO TrainData (field1, field2, pdf_url, created_at)
            VALUES (?, ?, ?, ?);
            SELECT SCOPE_IDENTITY() AS last_id;
            """,
            data['field1'], data['field2'], data['pdf_url'], datetime.now(),
        ).fetchval()

        # Sample INSERT into ItemDetails linked to TrainData
        for item_data in data['item_details']:
            cursor.execute("""
                INSERT INTO ItemDetails (train_data_id, item_field1, item_field2)
                VALUES (?, ?, ?)
            """, train_data_id, item_data['item_field1'], item_data['item_field2'])

        # Commit the changes
        connection.commit()
        return jsonify({"message": "Data inserted successfully!"}), 201
    except Exception as e:
        # Roll back partial work so the shared connection is left clean.
        connection.rollback()
        return jsonify({"error": str(e)}), 500
# NOTE(review): this guard appears *before* the /upload route below, and
# app.run() blocks, so when the module is executed top-to-bottom as shown
# the upload route is never registered -- move this to the end of the file.
if __name__ == '__main__':
    app.run(debug=True)
# Replace with your Azure Storage account connection string and container name.
# Use a distinct variable name here: reusing `connection_string` would clobber
# the SQL Server connection string defined earlier in the file.
storage_connection_string = "Azure Storage account connection string "
container_name = " Azure Storage Container Name "
blob_service_client = BlobServiceClient.from_connection_string(storage_connection_string)
container_client = blob_service_client.get_container_client(container_name)
@app.route('/upload', methods=['POST'])
def upload_file():
    """Receive a multipart file upload and store it in the Azure Blob container.

    Reads the file from the 'file' form field, uploads its bytes with the
    client-supplied content type, and returns a JSON status message.
    """
    try:
        uploaded = request.files['file']
        # Guard clause: reject the request when no file was attached.
        if not uploaded:
            return jsonify({"error": "No file provided in the request"}), 400
        blob_client = container_client.get_blob_client(uploaded.filename)
        settings = ContentSettings(content_type=uploaded.content_type)
        blob_client.upload_blob(uploaded.read(), content_settings=settings)
        return jsonify({"message": "File uploaded successfully"})
    except Exception as e:
        return jsonify({"error": str(e)}), 500
Sample Input:
{
"field1": "Value2",
"field2": 42,
"pdf_url": "https://example.com/pdf1",
"item_details": [
{"item_field1": "ItemValue1", "item_field2": 15},
{"item_field1": "ItemValue2", "item_field2": 20}
]
}
Azure SQL
Azure Storage:
Answered By - Sampath
0 comments:
Post a Comment
Note: Only a member of this blog may post a comment.