I have been using GitHub Copilot agents to assist with code writing and am hugely impressed by what they can do. Yes, the results could be better, but you need to explain clearly what you are asking for. The small program below was produced by the agent from a single prompt. I am not claiming it is the best solution, but it is a good skeleton to start from and build on; a minimal usage sketch follows the listing.
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Specialized;
using Azure.Storage.Blobs.Models;
using System.Data.SQLite;
namespace SampleUploadFileToCloudVibeAgent
{
    public class LargeFileUploadRetryable
    {
        private readonly string _connectionString;
        private readonly int _chunkSize;

        public LargeFileUploadRetryable(string dbPath, int chunkSize = 4 * 1024 * 1024)
        {
            _connectionString = $"Data Source={dbPath};Version=3;";
            _chunkSize = chunkSize;
            InitializeDb();
        }
        private void InitializeDb()
        {
            using var conn = new SQLiteConnection(_connectionString);
            conn.Open();
            using var cmd = conn.CreateCommand();
            cmd.CommandText = @"CREATE TABLE IF NOT EXISTS UploadChunks (
                FilePath TEXT,
                BlockId TEXT,
                Offset INTEGER,
                Size INTEGER,
                Uploaded INTEGER,
                PRIMARY KEY (FilePath, BlockId)
            );";
            cmd.ExecuteNonQuery();
        }
        public async Task UploadFileAsync(string filePath, string blobConnectionString, string containerName, string blobName)
        {
            // Check if file exists
            if (!File.Exists(filePath))
                throw new FileNotFoundException("File not found.", filePath);

            // Check for internet connectivity before starting
            if (!IsInternetAvailable())
                throw new InvalidOperationException("No internet connection available.");

            var blobClient = new BlockBlobClient(blobConnectionString, containerName, blobName);
            var fileLength = new FileInfo(filePath).Length;
            var blockIds = new List<string>();

            // Register every chunk as pending (Uploaded = 0) up front so RetryPendingChunksAsync
            // can find unfinished chunks if the upload is interrupted.
            for (long pos = 0, num = 0; pos < fileLength; pos += _chunkSize, num++)
            {
                string id = Convert.ToBase64String(BitConverter.GetBytes((int)num));
                MarkBlockPending(filePath, id, pos, (int)Math.Min(_chunkSize, fileLength - pos));
            }

            using var fileStream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
            int blockNum = 0;
            long offset = 0;

            while (offset < fileLength)
            {
                int bytesToRead = (int)Math.Min(_chunkSize, fileLength - offset);
                // Block IDs must all be the same length; a 4-byte counter encoded as Base64 satisfies this.
                string blockId = Convert.ToBase64String(BitConverter.GetBytes(blockNum));

                if (!IsBlockUploaded(filePath, blockId))
                {
                    byte[] buffer = new byte[bytesToRead];
                    fileStream.Seek(offset, SeekOrigin.Begin);
                    int read = await fileStream.ReadAsync(buffer, 0, bytesToRead);

                    bool uploaded = false;
                    int retry = 0;
                    while (!uploaded && retry < 5)
                    {
                        try
                        {
                            using var ms = new MemoryStream(buffer, 0, read);
                            await blobClient.StageBlockAsync(blockId, ms);
                            MarkBlockUploaded(filePath, blockId, offset, read);
                            uploaded = true;
                        }
                        catch
                        {
                            // Back off before the next attempt.
                            await Task.Delay(1000 * (retry + 1));
                            retry++;

                            if (!IsInternetAvailable())
                            {
                                // Connection lost: wait 2 minutes, then retry the pending chunks.
                                await Task.Delay(TimeSpan.FromMinutes(2));
                                await RetryPendingChunksAsync(filePath, blobConnectionString, containerName, blobName);
                                return;
                            }

                            if (retry >= 5)
                                throw new InvalidOperationException("Failed to upload chunk after multiple retries.");
                        }
                    }
                }

                blockIds.Add(blockId);
                offset += bytesToRead;
                blockNum++;
            }

            await blobClient.CommitBlockListAsync(blockIds);
        }
        private async Task RetryPendingChunksAsync(string filePath, string blobConnectionString, string containerName, string blobName)
        {
            var blobClient = new BlockBlobClient(blobConnectionString, containerName, blobName);
            var pendingBlocks = new List<(string BlockId, long Offset, int Size)>();

            // Load the chunks that have not yet been marked as uploaded.
            using (var conn = new SQLiteConnection(_connectionString))
            {
                conn.Open();
                using var cmd = conn.CreateCommand();
                cmd.CommandText = @"SELECT BlockId, Offset, Size FROM UploadChunks
                                    WHERE FilePath = @f AND Uploaded = 0
                                    ORDER BY Offset ASC";
                cmd.Parameters.AddWithValue("@f", filePath);
                using var reader = cmd.ExecuteReader();
                while (reader.Read())
                {
                    string blockId = reader.GetString(0);
                    long offset = reader.GetInt64(1);
                    int size = reader.GetInt32(2);
                    pendingBlocks.Add((blockId, offset, size));
                }
            }

            if (pendingBlocks.Count == 0)
                return;

            using var fileStream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
            foreach (var (blockId, offset, size) in pendingBlocks)
            {
                byte[] buffer = new byte[size];
                fileStream.Seek(offset, SeekOrigin.Begin);
                int read = await fileStream.ReadAsync(buffer, 0, size);

                bool uploaded = false;
                int retry = 0;
                while (!uploaded && retry < 5)
                {
                    try
                    {
                        using var ms = new MemoryStream(buffer, 0, read);
                        await blobClient.StageBlockAsync(blockId, ms);
                        MarkBlockUploaded(filePath, blockId, offset, read);
                        uploaded = true;
                    }
                    catch
                    {
                        await Task.Delay(1000 * (retry + 1));
                        retry++;

                        if (!IsInternetAvailable())
                        {
                            // Wait and try again later.
                            await Task.Delay(TimeSpan.FromMinutes(2));
                            // Optionally, this method could be called again, but avoid infinite recursion.
                            return;
                        }

                        if (retry >= 5)
                            throw new InvalidOperationException("Failed to upload chunk after multiple retries.");
                    }
                }
            }
            // Note: this method only re-stages pending blocks; the block list still has to be
            // committed before the blob becomes available. That is left as an exercise in this skeleton.
        }
        // Simple internet connectivity check (ping to a reliable host)
        private bool IsInternetAvailable()
        {
            try
            {
                using var client = new System.Net.NetworkInformation.Ping();
                var reply = client.Send("8.8.8.8", 2000);
                return reply.Status == System.Net.NetworkInformation.IPStatus.Success;
            }
            catch
            {
                return false;
            }
        }
        private bool IsBlockUploaded(string filePath, string blockId)
        {
            using var conn = new SQLiteConnection(_connectionString);
            conn.Open();
            using var cmd = conn.CreateCommand();
            cmd.CommandText = "SELECT Uploaded FROM UploadChunks WHERE FilePath = @f AND BlockId = @b";
            cmd.Parameters.AddWithValue("@f", filePath);
            cmd.Parameters.AddWithValue("@b", blockId);
            var result = cmd.ExecuteScalar();
            return result != null && Convert.ToInt32(result) == 1;
        }
        private void MarkBlockUploaded(string filePath, string blockId, long offset, int size)
        {
            using var conn = new SQLiteConnection(_connectionString);
            conn.Open();
            using var cmd = conn.CreateCommand();
            cmd.CommandText = @"INSERT OR REPLACE INTO UploadChunks (FilePath, BlockId, Offset, Size, Uploaded) VALUES (@f, @b, @o, @s, 1);";
            cmd.Parameters.AddWithValue("@f", filePath);
            cmd.Parameters.AddWithValue("@b", blockId);
            cmd.Parameters.AddWithValue("@o", offset);
            cmd.Parameters.AddWithValue("@s", size);
            cmd.ExecuteNonQuery();
        }

        private void MarkBlockPending(string filePath, string blockId, long offset, int size)
        {
            using var conn = new SQLiteConnection(_connectionString);
            conn.Open();
            using var cmd = conn.CreateCommand();
            // INSERT OR IGNORE leaves an existing row (e.g. one already marked as uploaded) untouched.
            cmd.CommandText = @"INSERT OR IGNORE INTO UploadChunks (FilePath, BlockId, Offset, Size, Uploaded) VALUES (@f, @b, @o, @s, 0);";
            cmd.Parameters.AddWithValue("@f", filePath);
            cmd.Parameters.AddWithValue("@b", blockId);
            cmd.Parameters.AddWithValue("@o", offset);
            cmd.Parameters.AddWithValue("@s", size);
            cmd.ExecuteNonQuery();
        }
    }
}
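For reference, here is a minimal sketch of how the class might be called. The database path, connection string, container name, and blob name below are placeholder values I made up for illustration; they are not part of the generated program.

using System.Threading.Tasks;

namespace SampleUploadFileToCloudVibeAgent
{
    class Program
    {
        static async Task Main()
        {
            // Hypothetical values for illustration only.
            var uploader = new LargeFileUploadRetryable("uploadstate.db");
            await uploader.UploadFileAsync(
                @"C:\data\largefile.bin",                // local file to upload
                "<your-storage-connection-string>",      // Azure Storage connection string
                "my-container",                          // target container (assumed to exist)
                "largefile.bin");                        // destination blob name
        }
    }
}

The SQLite file simply records which blocks have been staged, so rerunning UploadFileAsync after an interruption skips chunks that were already uploaded and only transfers what is missing.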