DamageAssessment_Backend/DamageAssesmentApi/DamageAssesment.Api.DocuLinks/Providers/AzureBlobService.cs

using Azure;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Models;
using Azure.Storage.Blobs.Specialized;
using DamageAssesment.Api.DocuLinks.Interfaces;
using DamageAssesment.Api.DocuLinks.Models;
using Microsoft.Extensions.Configuration;
using System.IO;
using System.Text;
using System.Threading.Tasks;
namespace DamageAssesment.Api.DocuLinks.Providers
{
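// Stores DocuLink attachments in Azure Blob Storage: uploads new attachment files,
// replaces existing ones, and moves or deletes blobs in the configured container.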
public class AzureBlobService : IAzureBlobService
{
private readonly BlobServiceClient _blobClient;
private readonly BlobContainerClient _containerClient;
private readonly string uploadpath = "";
private readonly string Deletepath = "";
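// Reads the upload/delete folder paths and the blob connection settings from the "Fileupload" configuration section.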
public AzureBlobService(IConfiguration configuration)
{
uploadpath = configuration.GetValue<string>("Fileupload:folderpath");
Deletepath = configuration.GetValue<string>("Fileupload:Deletepath");
_blobClient = new BlobServiceClient(configuration.GetValue<string>("Fileupload:BlobConnectionString"));
_containerClient = _blobClient.GetBlobContainerClient(configuration.GetValue<string>("Fileupload:BlobContainerName"));
}
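// Builds a Doculink from the request: entries flagged as attachments have their base64
// content uploaded to the container under the upload folder; link-only entries keep their URL.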
public async Task<Models.Doculink> UploadDocument(int counter, ReqDoculink documentInfo)
{
Models.Doculink Documents = new Models.Doculink();
List <Models.DoculinkAttachments> attachments = new List<Models.DoculinkAttachments>();
try
{
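// Each file entry is either an uploaded attachment (base64 FileContent) or an external link (url)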
string path = "", UserfileName = "";
if (documentInfo.Files != null)
{
int counter1 = 1;
foreach (var item in documentInfo.Files)
{
if (item.IsAttachments)
{
UserfileName = Path.GetFileName(item.FileName);
var fileName = String.Format("Document_{0}_{1}{2}", counter, counter1, item.FileExtension);
byte[] byteArray = Convert.FromBase64String(item.FileContent);
MemoryStream stream = new MemoryStream(byteArray);
BlobClient client = _containerClient.GetBlobClient(uploadpath + "/" + fileName);
var result = await client.UploadAsync(stream, true);
path = uploadpath + "/" + fileName;
counter1++;
}
else
path = item.url;
attachments.Add(new Models.DoculinkAttachments { docName = UserfileName, Path = path, IsAttachments = item.IsAttachments, CustomOrder = item.CustomOrder });
}
}
Documents = new Models.Doculink()
{
linkTypeId = documentInfo.linkTypeId,
documentsTranslations = documentInfo.documentsTranslations,
doclinksAttachments = attachments,
IsDeleted = false,
CustomOrder = documentInfo.CustomOrder,
IsActive = true
};
return Documents;
}
catch (Exception)
{
// On failure, an empty Doculink is returned and the exception is swallowed
return new Models.Doculink();
}
}
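// Replaces the attachments of an existing Doculink: the current blobs are moved to the
// delete folder, then the incoming files are uploaded under the document id.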
public async Task<Models.Doculink> UpdateDocuments(int counter, Models.Doculink document, ReqDoculink documentInfo)
{
try
{
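// Move the existing attachments to the delete folder before uploading the replacements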
foreach (var item in document.doclinksAttachments)
{
Movefile(item.Path);
}
string path = "", UserfileName = "";
List<Models.DoculinkAttachments> attachments = new List<Models.DoculinkAttachments>();
int counter1 = 1;
foreach (var item in documentInfo.Files)
{
if (item.IsAttachments)
{
UserfileName = Path.GetFileName(item.FileName);
var fileName = String.Format("Document_{0}_{1}{2}", document.Id, counter1, item.FileExtension);
// FileContent is base64-encoded (as in UploadDocument), so decode it rather than treating it as UTF-8 text
byte[] byteArray = Convert.FromBase64String(item.FileContent);
MemoryStream stream = new MemoryStream(byteArray);
BlobClient client = _containerClient.GetBlobClient(uploadpath + "/" + fileName);
path = uploadpath + "/" + fileName;
var result = await client.UploadAsync(stream, true);
counter1++;
}
else
path = item.url;
attachments.Add(new Models.DoculinkAttachments { docName = UserfileName, Path = path, IsAttachments = item.IsAttachments, CustomOrder = item.CustomOrder });
}
Models.Doculink Documents = new Models.Doculink()
{
Id = documentInfo.Id,
linkTypeId = documentInfo.linkTypeId,
documentsTranslations = documentInfo.documentsTranslations,
IsActive = true,
IsDeleted = false,
CustomOrder = documentInfo.CustomOrder,
doclinksAttachments = attachments
};
return Documents;
}
catch (Exception)
{
// On failure, an empty Doculink is returned and the exception is swallowed
return new Models.Doculink();
}
}
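// Uploads raw form files to the container root and returns the blob responses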
public async Task<List<Azure.Response<BlobContentInfo>>> UploadFiles(List<IFormFile> files)
{
var azureResponse = new List<Azure.Response<BlobContentInfo>>();
foreach (var file in files)
{
string fileName = file.FileName;
using (var memoryStream = new MemoryStream())
{
await file.CopyToAsync(memoryStream);
memoryStream.Position = 0;
var response = await _containerClient.UploadBlobAsync(fileName, memoryStream, default);
azureResponse.Add(response);
}
}
return azureResponse;
}
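// Builds the destination name for a moved blob by appending a date/time stamp before the file extension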
public string getMovefilename(string movefilename)
{
var list = movefilename.Split('.');
if (list.Length > 0)
list[list.Length - 1] = DateTime.Now.ToShortDateString().Replace("/", "_") +"_"+ DateTime.Now.ToShortTimeString().Replace("/", "_")+"." + list[list.Length - 1];
return string.Join("_", list);
}
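// "Moves" a blob by copying it into the delete folder and then deleting the source blob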
public void Movefile(string path)
{
try
{
if (path != "")
{
string MovePath = getMovefilename(path.Replace(uploadpath, Deletepath));
// Get references to the source and destination blobs
BlobClient sourceBlobClient = _containerClient.GetBlobClient(path);
BlobClient destinationBlobClient = _containerClient.GetBlobClient(MovePath);
// Start the copy operation from the source to the destination
destinationBlobClient.StartCopyFromUri(sourceBlobClient.Uri);
// Check if the copy operation completed successfully
WaitForCopyToComplete(destinationBlobClient);
// Delete the source blob after a successful copy
sourceBlobClient.DeleteIfExists();
}
}
catch (Exception)
{
// Move failures are ignored; the source blob is left in place
}
}
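// Polls the destination blob until the server-side copy is no longer pending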
static void WaitForCopyToComplete(BlobClient blobClient)
{
BlobProperties properties = blobClient.GetProperties();
while (properties.CopyStatus == CopyStatus.Pending)
{
// Wait between polls; Task.Delay must be waited on, otherwise the loop spins without pausing
Task.Delay(TimeSpan.FromSeconds(1)).Wait();
properties = blobClient.GetProperties();
}
}
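// Deletes the blob at the given path if it exists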
public void DeleteFile(string url)
{
BlobClient sourceBlobClient = _containerClient.GetBlobClient(url);
sourceBlobClient.DeleteIfExists();
}
}
}