Posts

Restore all Glacier objects in S3 bucket

If you've got an entire bucket in S3 with items in Glacier or Glacier Deep Archive storage class, and you want to restore them to download them, this C# code will restore all of the files. Just set the constant values at the top and it'll go through the items one-by-one and submit a restore request. No notifications or anything when the restore is done, so maybe just give it a day or so and come back. using static System . Console ; using Amazon ; using Amazon . Runtime ; using Amazon . S3 ; using Amazon . S3 . Model ; const string ACCESS_KEY = " XXXXXXXXXXXXXXXX " ; const string SECRET_KEY = " XXXXXXXXXXXXXXXX " ; var regionEndpoint = RegionEndpoint . XXXXXXXXXXXXXXXX ; const string BUCKET_NAME = " XXXXXXXXXXXXXXXX " ; const int NUM_DAYS = 15 ; var client = new AmazonS3Client ( new BasicAWSCredentials ( ACCESS_KEY , SECRET_KEY ) , regionEndpoint ) ; var request = new ListObjectsRequest { BucketName =

Exception logging in ASP.NET Core

public void HandleException ( IApplicationBuilder app ) { app . Run ( async requestContext = > { try { string path = requestContext . Request . Path ; var exceptionHandlerPathFeature = requestContext . Features . Get < IExceptionHandlerPathFeature > ( ) ; var exception = exceptionHandlerPathFeature ? . Error ; User ? user = null ; try { user = /* implementation */ Cast requestContext . User into your custom user type } catch ( Exception ex ) { Debug . WriteLine ( ex ) ; } if ( exception != null ) { new Thread ( ( ) = > { // Using a new thread to escape any open transactions that are getting rolled back try { using var conn = new SqlConnection ( Configuration . GetConnectionString ( " connstr " ) ) ; conn . Open ( ) ; using var comm = conn . CreateCommand (

Brotli in .NET

Brotli  is a compression algorithm like GZip - it tends to have smaller compressed files at the expense of a small amount of extra time. Implementing in .NET is just as easy as with GZip: public class Brotli { public static void Compress ( Stream inputStream , Stream outputStream ) { using var gzip = new BrotliStream ( outputStream , CompressionMode . Compress ) ; byte [ ] buffer = new byte [ 8192 ] ; int count ; while ( ( count = inputStream . Read ( buffer , 0 , buffer . Length ) ) > 0 ) { gzip . Write ( buffer , 0 , count ) ; } } public static void Decompress ( Stream inputStream , Stream outputStream ) { using var gzip = new BrotliStream ( inputStream , CompressionMode . Decompress ) ; byte [ ] buffer = new byte [ 8192 ] ; int count ; while ( ( count = gzip . Read ( buffer , 0 , buffer . Length ) ) > 0 )

Check progress of SQL Server restore

From MSSQLTips:

-- Shows live progress (percent complete and projected finish time) for any
-- backup or restore currently running on the instance, together with the
-- statement text behind each session.
SELECT
    session_id AS SPID,
    command,
    txt.text AS Query,
    start_time,
    percent_complete,
    -- estimated_completion_time is reported in milliseconds remaining,
    -- so convert to seconds and add to "now" to get a finish timestamp
    DATEADD(second, estimated_completion_time / 1000, GETDATE()) AS estimated_completion_time
FROM sys.dm_exec_requests req
CROSS APPLY sys.dm_exec_sql_text(req.sql_handle) txt
WHERE req.command IN ('BACKUP DATABASE', 'RESTORE DATABASE')

Simple SQL Server backup/restore

-- Back Up ---------------------------------------------
-- Build a timestamped file name (UTC, yyyyMMddHHmmss) so repeated
-- backups never collide, then back the database up to it.
declare @backup_path nvarchar(255) = 'C:\backups\' + format(sysutcdatetime(), 'yyyyMMddHHmmss') + '.bak';
backup database MyDB
    to disk = @backup_path;
go

-- Restore ---------------------------------------------
-- Step 1: list the files inside the backup. The result set gives the
-- logical names of the MDF (data) and LDF (log) files, which you need
-- for the MOVE clauses below:
restore filelistonly
    from disk = 'C:\backups\20201103130743.bak';
go

-- Step 2: restore under a different database name, relocating the data
-- and log files. WITH REPLACE overwrites an existing database of the
-- same name.
restore database DifferentDB
    from disk = 'C:\backups\20201103130743.bak'
    with replace,
        move '**MDF Logical Name**' to 'c:\data\DifferentDB.mdf',
        move '**LDF Logical Name**' to 'c:\data\DifferentDB_log.ldf';
go

NodeJS Read/Write Files

const fs = require ( " fs " ) ; const readline = require ( " readline " ) ; // Read full file fs . readFile ( " myfile.txt " , { encoding : " utf-8 " } , ( err , data ) = > { console . log ( " Full file contents: " ) ; console . log ( data ) ; console . log ( " =================== " ) ; } ) ; // Read file line-by-line var lineReader = readline . createInterface ( { input : fs . createReadStream ( " myfile.txt " ) } ) ; lineReader . on ( " line " , line = > { console . log ( ` Line: ${ line } ` ) ; } ) ; lineReader . on ( " close " , ( ) = > console . log ( " DONE " ) ) ; // Write full file fs . writeFile ( " myfile.txt " , " some file contents " , err = > { if ( err ) console . error ( err ) ; } ) ; // Write file line-by-line var outputStream = fs . createWriteStream ( " myfil

NodeJS read/write text files

" use strict " ; ( function ( ) { var fs = require ( " fs " ) ; var readline = require ( " readline " ) ; fs . readFile ( " input.json " , function ( error , data ) { var obj = JSON . parse ( data ) ; fs . writeFile ( " output.json " , JSON . stringify ( obj ) , function ( err ) { if ( err ) { console . log ( " Error: " + err ) ; } else { console . log ( " Saved " ) ; } } ) ; } ) ; ( function ( ) { var outputStream = fs . createWriteStream ( " output.txt " ) ; var lineReader = readline . createInterface ( { input : fs . createReadStream ( " input.txt " ) } ) ; lineReader . on ( " line " , function ( line ) { console . log ( " Line: " + l