I have been stuck on this problem for two days.
I have added four blob storage origins to a single origin group in Azure Front Door and have been experimenting with blob SAS tokens by trial and error.
I have not changed any settings such as weights or priority, and health probes are enabled. Please tell me what I am doing wrong.
Code on request:
Apologies if the code is a little untidy; I was in a hurry.
Latency Calculation on User Login:
import { azurePingContainers } from "@/constants/azure-containers";
import axios from "axios";

export async function checkAzureLatency() {
  // Ping every storage account once and measure the round-trip time.
  const responses = await Promise.all(azurePingContainers.map(container => {
    return new Promise<{ latency: number, account: string }>((resolve) => {
      const startTime = performance.now();
      axios.get(container.url)
        .then(() => {
          const endTime = performance.now();
          const latency = parseFloat((endTime - startTime).toFixed(2));
          console.log(`${container.account} latency: ${latency}ms`);
          resolve({ latency, account: container.account });
        })
        .catch(error => {
          console.error(`Error fetching ${container.account} latency: ${error}`);
          // Unreachable accounts are marked with Infinity and filtered out below.
          resolve({ latency: Infinity, account: container.account });
        });
    });
  }));
  const validResponses = responses.filter(result => result.latency !== Infinity);
  if (validResponses.length === 0) {
    return null;
  }
  // Fastest account first.
  return validResponses.sort((a, b) => a.latency - b.latency);
}
export async function averageLatency() {
  // Run the latency check three times and average the results per account.
  const latencyChecks = (await Promise.all([
    checkAzureLatency(),
    checkAzureLatency(),
    checkAzureLatency()
  ])).filter((result): result is { latency: number, account: string }[] => result !== null);
  if (latencyChecks.length === 0) {
    return null;
  }
  console.log(latencyChecks);
  // Sum the latencies per account across all runs.
  // Note: the accumulator must be typed, otherwise TypeScript infers never[] for [].
  const totalLatencyArray = latencyChecks.reduce<{ latency: number, account: string }[]>((acc, val) => {
    val.forEach((current) => {
      const sameValue = acc.findIndex((x) => x.account === current.account);
      if (sameValue > -1) {
        acc[sameValue] = { latency: parseFloat((current.latency + acc[sameValue].latency).toFixed(2)), account: current.account };
      } else {
        acc.push(current);
      }
    });
    return acc;
  }, []);
  console.log('totalLatencyArray', totalLatencyArray);
  const averageLatencyArray = totalLatencyArray.map(item => ({
    latency: parseFloat((item.latency / latencyChecks.length).toFixed(2)),
    account: item.account
  }));
  console.log(averageLatencyArray);
  // Persist the sorted averages so the upload handler can pick the fastest account.
  localStorage.setItem('latency', JSON.stringify(averageLatencyArray));
  return averageLatencyArray;
}
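For context, azurePingContainers is imported but not shown above. A minimal sketch of what that constants module might look like (account names and ping URLs here are placeholder assumptions, not the real values), together with the login-time call:
import { averageLatency } from "@/services/latency";

// Hypothetical @/constants/azure-containers: one entry per storage account,
// each pointing at a small, publicly readable blob to ping.
export const azurePingContainers = [
  { account: "storageeastus2", url: "https://storageeastus2.blob.core.windows.net/ping/ping.txt" },
  { account: "storageuaenorth", url: "https://storageuaenorth.blob.core.windows.net/ping/ping.txt" },
];

// On user login: measure, average, and cache the per-account latencies.
averageLatency().then(result => {
  if (!result) console.warn("All latency checks failed");
});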
Upload Handler:
const getSasToken = async (container: string, account: string, ipAddress: string, fileType: string) => {
  // Ask the backend for a SAS token scoped to this container/account.
  const tokenResponse = await ax.post(
    `storage/sas`,
    { container, account, ipAddress, fileType },
    { headers: { ...authHeader() } },
  );
  if (tokenResponse.status !== 201 || !tokenResponse.data?.data?.sasToken) {
    console.error("Failed to get SAS token from backend!");
    return null;
  }
  const { sasToken } = tokenResponse.data.data;
  return sasToken;
};

const handleUpload = async (container: string, account: string, ipAddress: string, file: File) => {
  const token = await getSasToken(container, account, ipAddress, file.type);
  if (!token) return null;
  const blobService = new BlobService();
  return await blobService.uploadFile(file, token, container, account);
};
export const handleVideoUpload = async (file: File, container: string) => {
  try {
    let retryCounts = 0;
    const ipAddress = "192.something";
    // await ax
    //   .get("http://api.ipify.org/?format=json")
    //   .then((response) => response.data)
    //   .then((data) => {
    //     if (data) {
    //       ipAddress = data.ip;
    //     }
    //   });
    if (file) {
      const leastLatencyAccounts = JSON.parse(localStorage.getItem('latency') || '[]');
      if (leastLatencyAccounts && leastLatencyAccounts.length > 0) {
        const len = leastLatencyAccounts.length;
        // Iterate over a copy: shift() below mutates the array, and mutating
        // the array being iterated would silently skip every other account.
        for (const accountInfo of [...leastLatencyAccounts]) {
          // Cap retries: 3 attempts when 5+ accounts are cached, 2 when 4 or fewer.
          if ((len >= 5 && retryCounts === 3) || (len <= 4 && retryCounts === 2) || len === retryCounts) {
            console.error("Failed to upload file with retries");
            return null;
          }
          const response = await handleUpload(container, accountInfo.account, ipAddress, file);
          if (!response) {
            console.error(`Failed to upload the file with account storage ${accountInfo.account}!`);
            // Drop the failing account from the cached list and try the next one.
            leastLatencyAccounts.shift();
            localStorage.setItem('latency', JSON.stringify(leastLatencyAccounts));
            retryCounts++;
            continue;
          }
          return response;
        }
        // TODO: set the real client IP address, also encryption.
      } else {
        // No cached latency data: fall back to the first configured account.
        const response = await handleUpload(container, azurePingContainers[0].account, ipAddress, file);
        if (!response) {
          console.error(`Failed to upload the file with account storage ${azurePingContainers[0].account}!`);
        }
        return response;
      }
    }
  } catch (error) {
    console.error("There was an error uploading the file!", error);
  }
};
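For reference, a working version of the commented-out IP lookup; a minimal sketch assuming the public ipify endpoint (which returns { ip: string }) and the browser fetch API rather than the project's ax instance:
// Hypothetical helper: resolve the caller's public IP via api.ipify.org.
// Returns null on failure so the upload can proceed without an IP-bound token.
async function getPublicIp(): Promise<string | null> {
  try {
    const res = await fetch("https://api.ipify.org/?format=json");
    if (!res.ok) return null;
    const data: { ip: string } = await res.json();
    return data.ip;
  } catch {
    return null;
  }
}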
Upload Service:
import { BlobServiceClient, BlockBlobClient, BlobUploadCommonResponse } from "@azure/storage-blob";

export class BlobService {
  constructor() { }

  async uploadFile(file: File, token: string, container: string, account: string) {
    const fileName = this.generateFileName(file);
    try {
      const response = await this.uploadToBlobStorage(file, token, account, container, fileName);
      return { response, fileName };
    } catch (e) {
      console.error('Error processing token:', e);
      return null;
    }
  }

  generateFileName(file: File): string {
    // Strip the original extension before appending the timestamp,
    // otherwise it would appear twice (e.g. "clip.mp4-1700000000000.mp4").
    const extension = file.name.split('.').pop();
    const baseName = file.name.replace(/\.[^/.]+$/, '');
    return `${baseName}-${Date.now()}.${extension}`;
  }

  async uploadToBlobStorage(file: File, token: string, account: string, container: string, fileName: string): Promise<{ responseBlob: BlobUploadCommonResponse, account: string } | null> {
    // The SAS token is appended to the account URL as the query string.
    const blobServiceClient = new BlobServiceClient(`https://${account}.blob.core.windows.net?${token}`);
    const containerClient = blobServiceClient.getContainerClient(container);
    const blockBlobClient = containerClient.getBlockBlobClient(fileName);
    const responseBlob = await this.upload(file, blockBlobClient);
    if (responseBlob.errorCode) {
      console.warn('Error in responseBlob:', responseBlob.errorCode);
      return null;
    }
    return { responseBlob, account };
  }

  async upload(file: File, blockBlobClient: BlockBlobClient) {
    console.log('Starting upload...');
    const response = await blockBlobClient.uploadData(file, {
      blobHTTPHeaders: { blobContentType: file.type },
      concurrency: 20
    });
    store.dispatch(setProgressValue(100));
    return response;
  }
}
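The progress bar above jumps straight to 100 after the await completes. uploadData also accepts an onProgress callback, so incremental reporting is possible; a sketch, assuming the same Redux store and setProgressValue action as in the class above:
import { BlockBlobClient } from "@azure/storage-blob";

// Sketch: report incremental progress instead of dispatching 100 at the end.
// store and setProgressValue are assumed to be the same Redux pieces as above.
async function uploadWithProgress(file: File, blockBlobClient: BlockBlobClient) {
  return blockBlobClient.uploadData(file, {
    blobHTTPHeaders: { blobContentType: file.type },
    concurrency: 20,
    // Invoked by the SDK as blocks complete; loadedBytes is cumulative.
    onProgress: (progress) => {
      const percent = Math.round((progress.loadedBytes / file.size) * 100);
      store.dispatch(setProgressValue(percent));
    },
  });
}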
Backend:
async generateSAS(createSasTokenDto: GenerateSasTokenDto) {
  try {
    const { container, fileType, ipAddress, account } = createSasTokenDto;
    const permissions = new ContainerSASPermissions();
    permissions.write = true;
    const blobServiceClient = await this.getGenericServiceClient(account);
    const blobContainerClient = await this.getGenericContainerClient(container, blobServiceClient);
    // Zero out the seconds so minor clock skew does not invalidate the token.
    const zeroedSeconds = new Date().setSeconds(0);
    const options = {
      expiresOn: new Date(Date.now() + (1000 * 60 * 60 * 24 * 120)), // when to expire
      contentType: fileType,
      permissions,
      startsOn: new Date(zeroedSeconds),
      protocol: SASProtocol.Https,
      // ipRange: {
      //   start: ipAddress,
      //   end: ipAddress,
      // }
    };
    const sasToken = await blobContainerClient.generateSasUrl(options);
    // generateSasUrl returns the full URL; keep only the query string (the token).
    return {
      sasToken: sasToken.split(`${container}?`)[1],
    };
  } catch (error) {
    console.error(error);
    throw new Error(error.message || "Failed to create SAS token");
  }
}
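As an aside, the URL splitting at the end of generateSAS can be avoided: the SDK exposes generateBlobSASQueryParameters, which yields the bare token directly when given the signature values and a shared key credential. A sketch under the same credential setup as below (buildContainerSasToken is a hypothetical helper name):
import { generateBlobSASQueryParameters, ContainerSASPermissions, SASProtocol, StorageSharedKeyCredential } from "@azure/storage-blob";

// Sketch: build a container-scoped SAS token without splitting a URL.
// credential is the StorageSharedKeyCredential built in getGenericServiceClient.
function buildContainerSasToken(container: string, credential: StorageSharedKeyCredential): string {
  const sas = generateBlobSASQueryParameters({
    containerName: container, // no blobName => container-scoped token
    permissions: ContainerSASPermissions.parse("w"),
    protocol: SASProtocol.Https,
    startsOn: new Date(),
    expiresOn: new Date(Date.now() + 1000 * 60 * 60), // 1 hour
  }, credential);
  return sas.toString(); // the query string, e.g. "sv=...&sig=..."
}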
async getGenericServiceClient(account: string) {
  let accountKey = null;
  // Map each storage account to its key from configuration.
  switch (account) {
    case AZURE_STORAGE_ACCOUNTS.US_EAST_2:
      accountKey = this.configService.getOrThrow('azure-connection-storage-eastus2-key');
      break;
    case AZURE_STORAGE_ACCOUNTS.UAE_NORTH:
      accountKey = this.configService.getOrThrow('azure-connection-storage-uaenorth-key');
      break;
    default:
      throw new Error('Invalid Azure Storage Account');
  }
  const url = `https://${account}.blob.core.windows.net`;
  return new BlobServiceClient(url, new StorageSharedKeyCredential(account, accountKey));
}

async getGenericContainerClient(container: string, blobServiceClient: BlobServiceClient) {
  return blobServiceClient.getContainerClient(container);
}
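For the commented-out IP restriction, the options object in generateSAS could be extended as sketched here, assuming ipAddress is the client's public IPv4 address as received in the DTO (identifiers as in the code above):
// Sketch: bind the SAS token to the requesting client's IP (SasIPRange).
// permissions, SASProtocol and blobContainerClient are from generateSAS above.
const options = {
  expiresOn: new Date(Date.now() + 1000 * 60 * 60), // a shorter expiry makes sense when IP-bound
  permissions,
  protocol: SASProtocol.Https,
  startsOn: new Date(),
  ipRange: { start: ipAddress, end: ipAddress }, // single-address range
};
const sasUrl = await blobContainerClient.generateSasUrl(options);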
Expectation: upload the file to the best and nearest origin for the end user.
Result: Front Door ALWAYS uploads the file to the LATEST added storage account, i.e. the last one.
(screenshot: Front Door latency settings)
Front Door can be used for this purpose, but in my case the health probes kept failing. The approach above removes the need for Front Door if only uploads are required. I later blended this with the database by saving the accounts in this format:
{
  account: string,
  url: string,
  envVar: string
}
Thus everything becomes dynamic.
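A sketch of what that dynamic lookup might replace the switch statement with, assuming a hypothetical storageAccountRepository returning records of the shape above:
// Sketch: resolve the account key via the envVar stored in the database,
// replacing the hard-coded switch. storageAccountRepository is hypothetical.
async getGenericServiceClient(account: string) {
  const record = await this.storageAccountRepository.findByAccount(account);
  if (!record) {
    throw new Error('Invalid Azure Storage Account');
  }
  // envVar names the config entry holding this account's key.
  const accountKey = this.configService.getOrThrow(record.envVar);
  return new BlobServiceClient(
    record.url,
    new StorageSharedKeyCredential(record.account, accountKey),
  );
}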