diff --git a/.nuget/NuGet.exe b/.nuget/NuGet.exe new file mode 100644 index 00000000..34ad49b4 Binary files /dev/null and b/.nuget/NuGet.exe differ diff --git a/Squirrel.sln b/Squirrel.sln new file mode 100644 index 00000000..ba7c7e56 --- /dev/null +++ b/Squirrel.sln @@ -0,0 +1,22 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 2013 +VisualStudioVersion = 12.0.30501.0 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Squirrel", "src\Squirrel.csproj", "{1436E22A-FE3C-4D68-9A85-9E74DF2E6A92}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {1436E22A-FE3C-4D68-9A85-9E74DF2E6A92}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1436E22A-FE3C-4D68-9A85-9E74DF2E6A92}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1436E22A-FE3C-4D68-9A85-9E74DF2E6A92}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1436E22A-FE3C-4D68-9A85-9E74DF2E6A92}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/specs/ClientImplementation.md b/specs/ClientImplementation.md new file mode 100644 index 00000000..3227e1c9 --- /dev/null +++ b/specs/ClientImplementation.md @@ -0,0 +1,153 @@ +# Client-side Library + +To be able to meet the specifications of the "updates" section of the README +(especially the bits about 'No Reboots', 'Updates should be applied while the +app is running'), we have to be a bit more clever than "Stuff everything in a +folder, hit go". + +### How can you replace DLLs while they're loaded? Impossible! + +You can't. So, how can you do it? The basic trick that ClickOnce uses is, you +have a folder of EXEs and DLLs, and an Application Shortcut. When ClickOnce +goes to update its stuff, it builds a completely *new* folder of binaries, +then the last thing it does is rewrite the app shortcut to point to the new +folder. + +So, to that end, the installation root really only needs to consist of two +folders: + +``` + \packages + MyCoolApp-1.0.nupkg + MyCoolApp-1.1-delta.nupkg + MyCoolApp-1.1.nupkg ## Generated from 1.0+1.1-delta + \app-[version] +``` + +Packages is effectively immutable, it simply consists of the packages we've +downloaded. This means however, that we need write-access to our own install +directory - this is fine for per-user installs, but if the user has installed +to Program Files, we'll need to come up with another solution. And that +solution is, "Only support per-user installs". + +## The Update process, from start to finish + +### Syncing the packages directory + +The first thing that the Squirrel client will do to start the updates process, is +download the remote version of "Releases". Comparing this file to the Releases +file on disk will tell us whether an update is available. + +Determining whether to use the delta packages or not will depend on the +download size - the updater will take the smaller of "latest full package" vs. +"Sum of all delta packages between current and latest". The updater makes a +choice, then fetches down all the files and checks them against the SHA1s in +the Releases file. + +If the installer decided to do a Delta update, it will then use the Delta +updates against the existing Full package to build a new Full package. 
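+
+As a rough illustration only (this is not the actual updater code), the size
+comparison described above might look something like the sketch below, using
+the `ReleaseEntry` shape described under "Client-side API" later in this
+document. The `ShouldUseDeltas` helper name is made up for this sketch:
+
+```
+// Sketch: prefer deltas only when their combined download size beats the
+// latest full package.
+static bool ShouldUseDeltas(IEnumerable<ReleaseEntry> remoteEntries, Version current)
+{
+    var latestFull = remoteEntries.Where(x => !x.IsDelta)
+                                  .OrderByDescending(x => x.Version)
+                                  .First();
+
+    var deltas = remoteEntries.Where(x => x.IsDelta && x.Version > current).ToList();
+
+    // No applicable deltas, or the deltas cost more bandwidth than the full
+    // package? Just fetch the latest full package instead.
+    return deltas.Any() && deltas.Sum(x => x.Filesize) < latestFull.Filesize;
+}
+```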
+ +### Installing a full update + +Since we've done the prep work to create a new NuGet package from the deltas, +the actual update process only has to deal with full NuGet packages. This is +as simple as: + +1. Extract the NuGet package to a temp dir +1. Move lib\net40 to \app-[newversion] +1. Rewrite the shortcut to point to \app-[newversion] + +On next startup, we blow away \app-[version] since it's now the previous +version of the code. + +### What do we do on Setup? (Bootstrapping) + +Since the WiX setup application is too dumb to setup our default directory, in +order to simplify trying to bootstrap our app directory, we'll just recreate +it. This is some wasted bandwidth, but oh well. If the packages or app root +doesn't actually exist, we'll download the latest full release and set up the +app. + +### Client-side API + +Referencing Squirrel.Client.dll, `UpdateManager` is all the app dev needs to use. + + UpdateManager + UpdateInfo CheckForUpdates() + UpdateInfo DownloadUpdate() + List ApplyUpdates() + +`UpdateInfo` contains information about pending updates if there is +any, and is null if there isn't. + + UpdateInfo + ReleaseEntry CurrentlyInstalledVersion + ReleaseEntry FutureReleaseEntry + IEnumerable ReleasesToApply + +And `ReleaseEntry` contains the specifics of each release: + + ReleaseEntry + string SHA1 + string Filename + long Filesize + bool IsDelta + +## Applying Updates + +#### A note about Reactive Extensions + +Squirrel uses Reactive Extensions (Rx) heavily as the process necessary to +retrieve, download and apply updates is best done asynchronously. If you +are using the `Microsoft.Bcl.Async` package (which Squirrel also uses) you +can combine the Rx APIs with the TPL async/await keywords, for maximum +simplicity. + +### Check yourself + +First, check the location where your application updates are hosted: + +``` +var updateManager = new UpdateManager(@"C:\Users\brendanforster\Desktop\TestApp", + "TestApp", + FrameworkVersion.Net40); + +var updateInfo = await updateManager.CheckForUpdate(); + +if (updateInfo == null) { + Console.WriteLine("No updates found"); +} else if (!info.ReleasesToApply.Any()) { + Console.WriteLine("You're up to date!"); +} else { + var latest = info.ReleasesToApply.MaxBy(x => x.Version).First(); + Console.WriteLine("You can update to {0}", latest.Version); +} +``` + +Depending on the result you get from this operation, you might: + + - not detect any updates + - be on the latest version + - have one or more versions to apply + +### Fetch all the Updates + +The result from `CheckForUpdates` will contain a list of releases to apply to +your current application. + +That result becomes the input to `DownloadReleases`: + +``` +var releases = updateInfo.ReleasesToApply; + +await updateManager.DownloadReleases(releases); +``` + +### Apply dem Updates + +And lastly, once those updates have been downloaded, tell Squirrel to apply them: + +``` +var results = await updateManager.ApplyReleases(downloadedUpdateInfo); +updateManager.Dispose(); // don't forget to tidy up after yourself +``` diff --git a/specs/Implementation.md b/specs/Implementation.md new file mode 100644 index 00000000..5b22a8ac --- /dev/null +++ b/specs/Implementation.md @@ -0,0 +1,95 @@ +# Implementation + +## Major Pieces + +We need: + +- A client library, which includes the core update logic +- An executable / PowerShell script to implement `New-Release` +- The actual Setup.exe that Create-Release hacks up, as well as any related + implementation (WiX stuff?) that we need. 
+ +## Production / "Server Side" + +### The tricky part + +Ironically, the difficulty of using NuGet packages as a distribution container +for your app, is *if your app uses NuGet*. This is because NuGet (with good +reason!) packages the *list* of dependencies, not the actual binaries. So, if +we were to try to use the NuGet package of the App directly, we'd be missing a +bunch of DLLs. + +So, we need an application that can *flatten* a NuGet dependency tree and +repack the package with all the DLLs. While this is a lot of steps, it's +actually pretty straightforward: + +1. Extract the App's NuGet package to a temp directory. +1. Walk the list of dependencies. For each dependency, extract it on top of + the temp directory (i.e. so that its `lib/*` ends up in the App's dir) +1. Recursively do the same thing (i.e. recurse down the dependency tree) +1. Edit the root NuGet package XML and remove all its explicit dependencies. + +This is kind of the moral equivalent of the Rails Gem "vendor freeze" I guess. + +### Delta Packages + +Now, once we've got a full package, we need to generate a Delta package. To do +this, we'll replace all the DLL/EXEs in the NuGet packages with bsdiff files. +[bspatch/bsdiff](http://code.logos.com/blog/2010/12/binary_patching_with_bsdiff.html) +is a mostly efficient algorithm for calculating diffs between binary files +(especially Native binaries, but it works well for .NET ones too), and a way +to apply them. + +So, this is pretty easy: + +1. Extract the previous NuGet package +1. Extract the current NuGet package +1. Replace every EXE/DLL with the bsdiff. So, `lib\net40\MyCoolApp.exe` + becomes `lib\net40\MyCoolApp.exe.diff`. Create a file that contains a SHA1 + of the expected resulting file and its filesize called + `lib\net40\MyCoolApp.exe.shasum` +1. New DLLs in current get put in verbatim +1. Zip it back up + +The .shasum file has the same format as the Releases file described in the +"'Latest' Pointer" section, except that it will only have one entry. + +So now we've got all of the *metadata* of the original package, just none of +its *contents*. To get the final package, we do the following: + +1. Take the previous version, expand it out +1. Take the delta version, do the same +1. For each DLL in the previous package, we bspatch it, then check the shasum + file to ensure we created the correct resulting file +1. If we find a DLL in the new package, just copy it over +1. If we can't find a bspatch for a file, nuke it (it doesn't exist in the new + rev) +1. Zip it back up + +### ChangeLogs / Release Notes + +To write release notes for each release, we're going to reuse the +`` NuSpec element. However, we're going to standard that you +can write Markdown in this element, and as part of generating a flattened +package, we will render this Markdown as HTML. + +### "Latest" Pointer + +One of the last things we do before finishing `Create-Release` is that we +write out a simple "Releases" file alongside the flattened and Delta NuGet +packages. This is a text file that has the name of all of the release package +filenames in the folder in release order (i.e. oldest at top, newest at +bottom), along with the SHA1 hashes of their contents and their file sizes. 
+So, something like: + +``` + 94689fede03fed7ab59c24337673a27837f0c3ec MyCoolApp-1.0.nupkg 1004502 + 3a2eadd15dd984e4559f2b4d790ec8badaeb6a39 MyCoolApp-1.1.nupkg 1040561 + 14db31d2647c6d2284882a2e101924a9c409ee67 MyCoolApp-1.1-delta.nupkg 80396 +``` + +This format has a number of advantages - it's dead simple, yet enables us to +check for package corruption, as well as makes it efficient to determine what +to do if a user gets multiple versions behind (i.e. whether it's worth it to +download all of the delta packages to catch them up, or to just download the +latest full package) diff --git a/specs/Installer.md b/specs/Installer.md new file mode 100644 index 00000000..ba492173 --- /dev/null +++ b/specs/Installer.md @@ -0,0 +1,59 @@ +# Installer + +Installer just installs `WixUI` whose job is to: + +1. Run the client code to unpack the latest full NuGet package and finish + initial install. +1. Execute the uninstaller code when WiX goes to remove us, and remove the App + directory. + +### So, on install: + +1. WiX unpacks `WixUI` and runs it, and puts an entry in *Programs and + Features*. +1. `WixUI` executes initial install using `Squirrel.Client` for the full + NuGet package, doing the update in-place so the installer never needs to be + rebuilt. + +### On Uninstall: + +1. WiX gets notified about the uninstall, calls `WixUI` to do app + uninstall via `Squirrel.Client` +1. WiX then blows away `WixUI`, the "real" installed app. + +## Bootstrap UI + +`WixUI` has an extremely simple UI when it does its work, it just pops +up, shows a progress bar, a-la Chrome Installer: + +![](http://t0.gstatic.com/images?q=tbn:ANd9GcS_DuuEyOX1lfeo_jDetHLiE17pp_4M-Xerj2ieGEkvQQ4h83w57IL5KD6Kzw) + +On Uninstall, there is no UI, it's solely in the background. + +If Setup.exe gets invoked with the 'Install' action, and the app is already +installed, we just execute the app, a-la ClickOnce. + +## Generating the WiX installer + +The WiX install script is generated via a Mustache template, whose contents +are primarily populated via the generated NuGet release package. WiX will end +up installing `WixUI`, the latest NuGet package file, and a one-line +RELEASES file (meaning that what WiX installs is technically a valid Squirrel +remote update directory). + +## WiX Engine Events and what we should do about them + +* `DetectedPackage` - if we're installed (determine this by looking at the + NuGet package in the same directory as the app), we run the app and bail. + +* `DetectComplete` - Do what we're actually here to do (invoke the Squirrel + installer), then on the UI thread, tell WiX to finish up. + +* `PlanPackageBegin` - squelch installation of .NET 4 + +* `PlanComplete` - Push WiX to to Apply state + +* `ApplyComplete` - If something bad happened, switch to UI Error state, + otherwise start the app if we're in Interactive Mode and call Shutdown() + +* `ExecuteError` - Switch to the UI Error state diff --git a/specs/Scenarios.md b/specs/Scenarios.md new file mode 100644 index 00000000..d6a4b877 --- /dev/null +++ b/specs/Scenarios.md @@ -0,0 +1,41 @@ +## Scenarios + +#### Production + +I'm a developer with a WPF application. I have *zero* way to distribute my +application at the moment. I go to NuGet and install the Squirrel client library. + +Now, I want to publish a release. To do so, I pop into the PowerShell Console +and type `New-Release`. What does this do? It: + +* Creates a NuGet package of my app (i.e. 
via shelling out to NuGet.exe or w/e) +* It puts the package in a special "Releases" directory of my solution (along + perhaps with a special "delta package" for updates) +* It also creates a Setup.exe that I can distribute to people +* Can also transform `changelog.md` to `changelog.html` using the bundled + Markdown library that ships with Squirrel + +I've created a new release. Now, I want to share it with the world! I upload +the contents of my Releases directory verbatim to the web via S3 / FTP / +whatever. + +In my app, I call `bool +UpdateManager.CheckForUpdates("http://mycoolsite.com/releases/")` - similar to +ClickOnce API but not awful. The library helps me check for updates, get the +ChangeLog HTML to render, and if I'm really lazy, I can just call +`UpdateManager.ShowUpdateNotification()` and get a stock WPF dialog walking +the user through the upgrade. For production applications, I get the +information I need to create my own update experience (yet I don't have to do +any of the actual heavy lifting). + +When I call `UpdateManager.Upgrade()`, the application does the update in the +background, without disturbing the user at all - the next time the app +restarts, it's the new version. + + +#### Users + +I click on a link, and a setup experience starts up. Instead of the usual +"Next >" buttons, I see a single "Install" button (think Visual Studio 2012 installer). +Clicking that installs and immediately opens the application. No UAC prompts, +no long waits. diff --git a/specs/Tools.md b/specs/Tools.md new file mode 100644 index 00000000..0942911a --- /dev/null +++ b/specs/Tools.md @@ -0,0 +1,24 @@ +## Scenarios + +At the end of the day, here's how a developer will use Squirrel: + +1. Add the **Squirrel** package to your application +1. As part of the install for Squirrel, NuGet Package Build is enabled in the csproj file +1. The user edits the generated `.nuspec` to specify some details about their app +1. From the NuGet package console, run `New-Release` - this builds the + world, and you end up with a `$SolutionDir/Releases` folder that has both a + Squirrel release package as well as a `Setup.exe` + +## How does this work: + +1. Call `$DTE` to build the current project, including the NuGet packages +1. Look at all of the projects which have references to `Squirrel.Client` +1. Look up the build output directory for those projects, run + `CreateReleasePackage.exe` on all of the .nupkg files +1. Using the generated NuGet package, fill in the `Template.wxs` file +1. Create a temporary directory for the contents of the Setup.exe, copy in the + `Squirrel.WiXUi.dll` as well as any DLL Project that references + `Squirrel.Client.dll` +1. Run `Candle` and `Light` to generate a `Setup.exe`, which contains + Squirrel.WiXUi.dll and friends, any custom UI DLLs, and the latest full + `nupkg` file. 
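+
+As a very rough sketch (not the actual tooling code), step 3 above could be
+expressed in terms of the `Squirrel.Core` types added elsewhere in this change
+(`ReleasePackage`, `DeltaPackageBuilder` and `ReleaseEntry`). The exact wiring
+inside `CreateReleasePackage.exe`, and the paths used here, are assumptions:
+
+```
+// Flatten the app's .nupkg into a "full" release package
+var input = new ReleasePackage(@"MyCoolApp.1.1.0.nupkg");
+var fullPackage = input.CreateReleasePackage(
+    @"Releases\MyCoolApp-1.1.0-full.nupkg", packagesRootDir: @"packages");
+
+// If an earlier full release exists, also emit a delta package against it
+var entries = File.Exists(@"Releases\RELEASES")
+    ? ReleaseEntry.ParseReleaseFile(File.ReadAllText(@"Releases\RELEASES"))
+    : Enumerable.Empty<ReleaseEntry>();
+var previous = ReleaseEntry.GetPreviousRelease(entries, input, @"Releases");
+if (previous != null) {
+    new DeltaPackageBuilder().CreateDeltaPackage(
+        previous, new ReleasePackage(fullPackage, true), @"Releases\MyCoolApp-1.1.0-delta.nupkg");
+}
+
+// Regenerate the RELEASES index so clients can see the new packages
+ReleaseEntry.BuildReleasesFile(@"Releases");
+```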
diff --git a/src/BinaryPatchUtility.cs b/src/BinaryPatchUtility.cs new file mode 100644 index 00000000..1bbd1820 --- /dev/null +++ b/src/BinaryPatchUtility.cs @@ -0,0 +1,899 @@ +using System; +using System.IO; +using Ionic.BZip2; + +// Adapted from https://github.com/LogosBible/bsdiff.net/blob/master/src/bsdiff/BinaryPatchUtility.cs + +namespace Squirrel.Core +{ + /* + The original bsdiff.c source code (http://www.daemonology.net/bsdiff/) is + distributed under the following license: + + Copyright 2003-2005 Colin Percival + All rights reserved + + Redistribution and use in source and binary forms, with or without + modification, are permitted providing that the following conditions + are met: + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + */ + class BinaryPatchUtility + { + /// + /// Creates a binary patch (in bsdiff format) that can be used + /// (by ) to transform into . + /// + /// The original binary data. + /// The new binary data. + /// A to which the patch will be written. + public static void Create(byte[] oldData, byte[] newData, Stream output) + { + // check arguments + if (oldData == null) + throw new ArgumentNullException("oldData"); + if (newData == null) + throw new ArgumentNullException("newData"); + if (output == null) + throw new ArgumentNullException("output"); + if (!output.CanSeek) + throw new ArgumentException("Output stream must be seekable.", "output"); + if (!output.CanWrite) + throw new ArgumentException("Output stream must be writable.", "output"); + + /* Header is + 0 8 "BSDIFF40" + 8 8 length of bzip2ed ctrl block + 16 8 length of bzip2ed diff block + 24 8 length of new file */ + /* File is + 0 32 Header + 32 ?? Bzip2ed ctrl block + ?? ?? Bzip2ed diff block + ?? ?? 
Bzip2ed extra block */ + byte[] header = new byte[c_headerSize]; + WriteInt64(c_fileSignature, header, 0); // "BSDIFF40" + WriteInt64(0, header, 8); + WriteInt64(0, header, 16); + WriteInt64(newData.Length, header, 24); + + long startPosition = output.Position; + output.Write(header, 0, header.Length); + + int[] I = SuffixSort(oldData); + + byte[] db = new byte[newData.Length + 1]; + byte[] eb = new byte[newData.Length + 1]; + + int dblen = 0; + int eblen = 0; + + using (WrappingStream wrappingStream = new WrappingStream(output, Ownership.None)) + using (BZip2OutputStream bz2Stream = new BZip2OutputStream(wrappingStream)) + { + // compute the differences, writing ctrl as we go + int scan = 0; + int pos = 0; + int len = 0; + int lastscan = 0; + int lastpos = 0; + int lastoffset = 0; + while (scan < newData.Length) + { + int oldscore = 0; + + for (int scsc = scan += len; scan < newData.Length; scan++) + { + len = Search(I, oldData, newData, scan, 0, oldData.Length, out pos); + + for (; scsc < scan + len; scsc++) + { + if ((scsc + lastoffset < oldData.Length) && (oldData[scsc + lastoffset] == newData[scsc])) + oldscore++; + } + + if ((len == oldscore && len != 0) || (len > oldscore + 8)) + break; + + if ((scan + lastoffset < oldData.Length) && (oldData[scan + lastoffset] == newData[scan])) + oldscore--; + } + + if (len != oldscore || scan == newData.Length) + { + int s = 0; + int sf = 0; + int lenf = 0; + for (int i = 0; (lastscan + i < scan) && (lastpos + i < oldData.Length); ) + { + if (oldData[lastpos + i] == newData[lastscan + i]) + s++; + i++; + if (s * 2 - i > sf * 2 - lenf) + { + sf = s; + lenf = i; + } + } + + int lenb = 0; + if (scan < newData.Length) + { + s = 0; + int sb = 0; + for (int i = 1; (scan >= lastscan + i) && (pos >= i); i++) + { + if (oldData[pos - i] == newData[scan - i]) + s++; + if (s * 2 - i > sb * 2 - lenb) + { + sb = s; + lenb = i; + } + } + } + + if (lastscan + lenf > scan - lenb) + { + int overlap = (lastscan + lenf) - (scan - lenb); + s = 0; + int ss = 0; + int lens = 0; + for (int i = 0; i < overlap; i++) + { + if (newData[lastscan + lenf - overlap + i] == oldData[lastpos + lenf - overlap + i]) + s++; + if (newData[scan - lenb + i] == oldData[pos - lenb + i]) + s--; + if (s > ss) + { + ss = s; + lens = i + 1; + } + } + + lenf += lens - overlap; + lenb -= lens; + } + + for (int i = 0; i < lenf; i++) + db[dblen + i] = (byte)(newData[lastscan + i] - oldData[lastpos + i]); + for (int i = 0; i < (scan - lenb) - (lastscan + lenf); i++) + eb[eblen + i] = newData[lastscan + lenf + i]; + + dblen += lenf; + eblen += (scan - lenb) - (lastscan + lenf); + + byte[] buf = new byte[8]; + WriteInt64(lenf, buf, 0); + bz2Stream.Write(buf, 0, 8); + + WriteInt64((scan - lenb) - (lastscan + lenf), buf, 0); + bz2Stream.Write(buf, 0, 8); + + WriteInt64((pos - lenb) - (lastpos + lenf), buf, 0); + bz2Stream.Write(buf, 0, 8); + + lastscan = scan - lenb; + lastpos = pos - lenb; + lastoffset = pos - scan; + } + } + } + + // compute size of compressed ctrl data + long controlEndPosition = output.Position; + WriteInt64(controlEndPosition - startPosition - c_headerSize, header, 8); + + // write compressed diff data + using (WrappingStream wrappingStream = new WrappingStream(output, Ownership.None)) + using (BZip2OutputStream bz2Stream = new BZip2OutputStream(wrappingStream)) + { + bz2Stream.Write(db, 0, dblen); + } + + // compute size of compressed diff data + long diffEndPosition = output.Position; + WriteInt64(diffEndPosition - controlEndPosition, header, 16); + + // write compressed extra 
data + using (WrappingStream wrappingStream = new WrappingStream(output, Ownership.None)) + using (BZip2OutputStream bz2Stream = new BZip2OutputStream(wrappingStream)) + { + bz2Stream.Write(eb, 0, eblen); + } + + // seek to the beginning, write the header, then seek back to end + long endPosition = output.Position; + output.Position = startPosition; + output.Write(header, 0, header.Length); + output.Position = endPosition; + } + + /// + /// Applies a binary patch (in bsdiff format) to the data in + /// and writes the results of patching to . + /// + /// A containing the input data. + /// A func that can open a positioned at the start of the patch data. + /// This stream must support reading and seeking, and must allow multiple streams on + /// the patch to be opened concurrently. + /// A to which the patched data is written. + public static void Apply(Stream input, Func openPatchStream, Stream output) + { + // check arguments + if (input == null) + throw new ArgumentNullException("input"); + if (openPatchStream == null) + throw new ArgumentNullException("openPatchStream"); + if (output == null) + throw new ArgumentNullException("output"); + + /* + File format: + 0 8 "BSDIFF40" + 8 8 X + 16 8 Y + 24 8 sizeof(newfile) + 32 X bzip2(control block) + 32+X Y bzip2(diff block) + 32+X+Y ??? bzip2(extra block) + with control block a set of triples (x,y,z) meaning "add x bytes + from oldfile to x bytes from the diff block; copy y bytes from the + extra block; seek forwards in oldfile by z bytes". + */ + // read header + long controlLength, diffLength, newSize; + using (Stream patchStream = openPatchStream()) + { + // check patch stream capabilities + if (!patchStream.CanRead) + throw new ArgumentException("Patch stream must be readable.", "openPatchStream"); + if (!patchStream.CanSeek) + throw new ArgumentException("Patch stream must be seekable.", "openPatchStream"); + + byte[] header = patchStream.ReadExactly(c_headerSize); + + // check for appropriate magic + long signature = ReadInt64(header, 0); + if (signature != c_fileSignature) + throw new InvalidOperationException("Corrupt patch."); + + // read lengths from header + controlLength = ReadInt64(header, 8); + diffLength = ReadInt64(header, 16); + newSize = ReadInt64(header, 24); + if (controlLength < 0 || diffLength < 0 || newSize < 0) + throw new InvalidOperationException("Corrupt patch."); + } + + // preallocate buffers for reading and writing + const int c_bufferSize = 1048576; + byte[] newData = new byte[c_bufferSize]; + byte[] oldData = new byte[c_bufferSize]; + + // prepare to read three parts of the patch in parallel + using (Stream compressedControlStream = openPatchStream()) + using (Stream compressedDiffStream = openPatchStream()) + using (Stream compressedExtraStream = openPatchStream()) + { + // seek to the start of each part + compressedControlStream.Seek(c_headerSize, SeekOrigin.Current); + compressedDiffStream.Seek(c_headerSize + controlLength, SeekOrigin.Current); + compressedExtraStream.Seek(c_headerSize + controlLength + diffLength, SeekOrigin.Current); + + // decompress each part (to read it) + using (BZip2InputStream controlStream = new BZip2InputStream(compressedControlStream)) + using (BZip2InputStream diffStream = new BZip2InputStream(compressedDiffStream)) + using (BZip2InputStream extraStream = new BZip2InputStream(compressedExtraStream)) + { + long[] control = new long[3]; + byte[] buffer = new byte[8]; + + int oldPosition = 0; + int newPosition = 0; + while (newPosition < newSize) + { + // read control data + for (int 
i = 0; i < 3; i++) + { + controlStream.ReadExactly(buffer, 0, 8); + control[i] = ReadInt64(buffer, 0); + } + + // sanity-check + if (newPosition + control[0] > newSize) + throw new InvalidOperationException("Corrupt patch."); + + // seek old file to the position that the new data is diffed against + input.Position = oldPosition; + + int bytesToCopy = (int)control[0]; + while (bytesToCopy > 0) + { + int actualBytesToCopy = Math.Min(bytesToCopy, c_bufferSize); + + // read diff string + diffStream.ReadExactly(newData, 0, actualBytesToCopy); + + // add old data to diff string + int availableInputBytes = Math.Min(actualBytesToCopy, (int)(input.Length - input.Position)); + input.ReadExactly(oldData, 0, availableInputBytes); + + for (int index = 0; index < availableInputBytes; index++) + newData[index] += oldData[index]; + + output.Write(newData, 0, actualBytesToCopy); + + // adjust counters + newPosition += actualBytesToCopy; + oldPosition += actualBytesToCopy; + bytesToCopy -= actualBytesToCopy; + } + + // sanity-check + if (newPosition + control[1] > newSize) + throw new InvalidOperationException("Corrupt patch."); + + // read extra string + bytesToCopy = (int)control[1]; + while (bytesToCopy > 0) + { + int actualBytesToCopy = Math.Min(bytesToCopy, c_bufferSize); + + extraStream.ReadExactly(newData, 0, actualBytesToCopy); + output.Write(newData, 0, actualBytesToCopy); + + newPosition += actualBytesToCopy; + bytesToCopy -= actualBytesToCopy; + } + + // adjust position + oldPosition = (int)(oldPosition + control[2]); + } + } + } + } + + private static int CompareBytes(byte[] left, int leftOffset, byte[] right, int rightOffset) + { + for (int index = 0; index < left.Length - leftOffset && index < right.Length - rightOffset; index++) + { + int diff = left[index + leftOffset] - right[index + rightOffset]; + if (diff != 0) + return diff; + } + return 0; + } + + private static int MatchLength(byte[] oldData, int oldOffset, byte[] newData, int newOffset) + { + int i; + for (i = 0; i < oldData.Length - oldOffset && i < newData.Length - newOffset; i++) + { + if (oldData[i + oldOffset] != newData[i + newOffset]) + break; + } + return i; + } + + private static int Search(int[] I, byte[] oldData, byte[] newData, int newOffset, int start, int end, out int pos) + { + if (end - start < 2) + { + int startLength = MatchLength(oldData, I[start], newData, newOffset); + int endLength = MatchLength(oldData, I[end], newData, newOffset); + + if (startLength > endLength) + { + pos = I[start]; + return startLength; + } + else + { + pos = I[end]; + return endLength; + } + } + else + { + int midPoint = start + (end - start) / 2; + return CompareBytes(oldData, I[midPoint], newData, newOffset) < 0 ? 
+ Search(I, oldData, newData, newOffset, midPoint, end, out pos) : + Search(I, oldData, newData, newOffset, start, midPoint, out pos); + } + } + + private static void Split(int[] I, int[] v, int start, int len, int h) + { + if (len < 16) + { + int j; + for (int k = start; k < start + len; k += j) + { + j = 1; + int x = v[I[k] + h]; + for (int i = 1; k + i < start + len; i++) + { + if (v[I[k + i] + h] < x) + { + x = v[I[k + i] + h]; + j = 0; + } + if (v[I[k + i] + h] == x) + { + Swap(ref I[k + j], ref I[k + i]); + j++; + } + } + for (int i = 0; i < j; i++) + v[I[k + i]] = k + j - 1; + if (j == 1) + I[k] = -1; + } + } + else + { + int x = v[I[start + len / 2] + h]; + int jj = 0; + int kk = 0; + for (int i2 = start; i2 < start + len; i2++) + { + if (v[I[i2] + h] < x) + jj++; + if (v[I[i2] + h] == x) + kk++; + } + jj += start; + kk += jj; + + int i = start; + int j = 0; + int k = 0; + while (i < jj) + { + if (v[I[i] + h] < x) + { + i++; + } + else if (v[I[i] + h] == x) + { + Swap(ref I[i], ref I[jj + j]); + j++; + } + else + { + Swap(ref I[i], ref I[kk + k]); + k++; + } + } + + while (jj + j < kk) + { + if (v[I[jj + j] + h] == x) + { + j++; + } + else + { + Swap(ref I[jj + j], ref I[kk + k]); + k++; + } + } + + if (jj > start) + Split(I, v, start, jj - start, h); + + for (i = 0; i < kk - jj; i++) + v[I[jj + i]] = kk - 1; + if (jj == kk - 1) + I[jj] = -1; + + if (start + len > kk) + Split(I, v, kk, start + len - kk, h); + } + } + + private static int[] SuffixSort(byte[] oldData) + { + int[] buckets = new int[256]; + + foreach (byte oldByte in oldData) + buckets[oldByte]++; + for (int i = 1; i < 256; i++) + buckets[i] += buckets[i - 1]; + for (int i = 255; i > 0; i--) + buckets[i] = buckets[i - 1]; + buckets[0] = 0; + + int[] I = new int[oldData.Length + 1]; + for (int i = 0; i < oldData.Length; i++) + I[++buckets[oldData[i]]] = i; + + int[] v = new int[oldData.Length + 1]; + for (int i = 0; i < oldData.Length; i++) + v[i] = buckets[oldData[i]]; + + for (int i = 1; i < 256; i++) + { + if (buckets[i] == buckets[i - 1] + 1) + I[buckets[i]] = -1; + } + I[0] = -1; + + for (int h = 1; I[0] != -(oldData.Length + 1); h += h) + { + int len = 0; + int i = 0; + while (i < oldData.Length + 1) + { + if (I[i] < 0) + { + len -= I[i]; + i -= I[i]; + } + else + { + if (len != 0) + I[i - len] = -len; + len = v[I[i]] + 1 - i; + Split(I, v, i, len, h); + i += len; + len = 0; + } + } + + if (len != 0) + I[i - len] = -len; + } + + for (int i = 0; i < oldData.Length + 1; i++) + I[v[i]] = i; + + return I; + } + + private static void Swap(ref int first, ref int second) + { + int temp = first; + first = second; + second = temp; + } + + private static long ReadInt64(byte[] buf, int offset) + { + long value = buf[offset + 7] & 0x7F; + + for (int index = 6; index >= 0; index--) + { + value *= 256; + value += buf[offset + index]; + } + + if ((buf[offset + 7] & 0x80) != 0) + value = -value; + + return value; + } + + private static void WriteInt64(long value, byte[] buf, int offset) + { + long valueToWrite = value < 0 ? -value : value; + + for (int byteIndex = 0; byteIndex < 8; byteIndex++) + { + buf[offset + byteIndex] = (byte)(valueToWrite % 256); + valueToWrite -= buf[offset + byteIndex]; + valueToWrite /= 256; + } + + if (value < 0) + buf[offset + 7] |= 0x80; + } + + const long c_fileSignature = 0x3034464649445342L; + const int c_headerSize = 32; + } + + /// + /// A that wraps another stream. 
One major feature of is that it does not dispose the + /// underlying stream when it is disposed if Ownership.None is used; this is useful when using classes such as and + /// that take ownership of the stream passed to their constructors. + /// + /// See WrappingStream Implementation. + public class WrappingStream : Stream + { + /// + /// Initializes a new instance of the class. + /// + /// The wrapped stream. + /// Use Owns if the wrapped stream should be disposed when this stream is disposed. + public WrappingStream(Stream streamBase, Ownership ownership) + { + // check parameters + if (streamBase == null) + throw new ArgumentNullException("streamBase"); + + m_streamBase = streamBase; + m_ownership = ownership; + } + + /// + /// Gets a value indicating whether the current stream supports reading. + /// + /// true if the stream supports reading; otherwise, false. + public override bool CanRead + { + get { return m_streamBase == null ? false : m_streamBase.CanRead; } + } + + /// + /// Gets a value indicating whether the current stream supports seeking. + /// + /// true if the stream supports seeking; otherwise, false. + public override bool CanSeek + { + get { return m_streamBase == null ? false : m_streamBase.CanSeek; } + } + + /// + /// Gets a value indicating whether the current stream supports writing. + /// + /// true if the stream supports writing; otherwise, false. + public override bool CanWrite + { + get { return m_streamBase == null ? false : m_streamBase.CanWrite; } + } + + /// + /// Gets the length in bytes of the stream. + /// + public override long Length + { + get { ThrowIfDisposed(); return m_streamBase.Length; } + } + + /// + /// Gets or sets the position within the current stream. + /// + public override long Position + { + get { ThrowIfDisposed(); return m_streamBase.Position; } + set { ThrowIfDisposed(); m_streamBase.Position = value; } + } + + /// + /// Begins an asynchronous read operation. + /// + public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback callback, object state) + { + ThrowIfDisposed(); + return m_streamBase.BeginRead(buffer, offset, count, callback, state); + } + + /// + /// Begins an asynchronous write operation. + /// + public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state) + { + ThrowIfDisposed(); + return m_streamBase.BeginWrite(buffer, offset, count, callback, state); + } + + /// + /// Waits for the pending asynchronous read to complete. + /// + public override int EndRead(IAsyncResult asyncResult) + { + ThrowIfDisposed(); + return m_streamBase.EndRead(asyncResult); + } + + /// + /// Ends an asynchronous write operation. + /// + public override void EndWrite(IAsyncResult asyncResult) + { + ThrowIfDisposed(); + m_streamBase.EndWrite(asyncResult); + } + + /// + /// Clears all buffers for this stream and causes any buffered data to be written to the underlying device. + /// + public override void Flush() + { + ThrowIfDisposed(); + m_streamBase.Flush(); + } + + /// + /// Reads a sequence of bytes from the current stream and advances the position + /// within the stream by the number of bytes read. + /// + public override int Read(byte[] buffer, int offset, int count) + { + ThrowIfDisposed(); + return m_streamBase.Read(buffer, offset, count); + } + + /// + /// Reads a byte from the stream and advances the position within the stream by one byte, or returns -1 if at the end of the stream. 
+ /// + public override int ReadByte() + { + ThrowIfDisposed(); + return m_streamBase.ReadByte(); + } + + /// + /// Sets the position within the current stream. + /// + /// A byte offset relative to the parameter. + /// A value of type indicating the reference point used to obtain the new position. + /// The new position within the current stream. + public override long Seek(long offset, SeekOrigin origin) + { + ThrowIfDisposed(); + return m_streamBase.Seek(offset, origin); + } + + /// + /// Sets the length of the current stream. + /// + /// The desired length of the current stream in bytes. + public override void SetLength(long value) + { + ThrowIfDisposed(); + m_streamBase.SetLength(value); + } + + /// + /// Writes a sequence of bytes to the current stream and advances the current position + /// within this stream by the number of bytes written. + /// + public override void Write(byte[] buffer, int offset, int count) + { + ThrowIfDisposed(); + m_streamBase.Write(buffer, offset, count); + } + + /// + /// Writes a byte to the current position in the stream and advances the position within the stream by one byte. + /// + public override void WriteByte(byte value) + { + ThrowIfDisposed(); + m_streamBase.WriteByte(value); + } + + /// + /// Gets the wrapped stream. + /// + /// The wrapped stream. + protected Stream WrappedStream + { + get { return m_streamBase; } + } + + /// + /// Releases the unmanaged resources used by the and optionally releases the managed resources. + /// + /// true to release both managed and unmanaged resources; false to release only unmanaged resources. + protected override void Dispose(bool disposing) + { + try + { + // doesn't close the base stream, but just prevents access to it through this WrappingStream + if (disposing) + { + if (m_streamBase != null && m_ownership == Ownership.Owns) + m_streamBase.Dispose(); + m_streamBase = null; + } + } + finally + { + base.Dispose(disposing); + } + } + + private void ThrowIfDisposed() + { + // throws an ObjectDisposedException if this object has been disposed + if (m_streamBase == null) + throw new ObjectDisposedException(GetType().Name); + } + + Stream m_streamBase; + readonly Ownership m_ownership; + } + + /// + /// Indicates whether an object takes ownership of an item. + /// + public enum Ownership + { + /// + /// The object does not own this item. + /// + None, + + /// + /// The object owns this item, and is responsible for releasing it. + /// + Owns + } + + /// + /// Provides helper methods for working with . + /// + public static class StreamUtility + { + /// + /// Reads exactly bytes from . + /// + /// The stream to read from. + /// The count of bytes to read. + /// A new byte array containing the data read from the stream. + public static byte[] ReadExactly(this Stream stream, int count) + { + if (count < 0) + throw new ArgumentOutOfRangeException("count"); + byte[] buffer = new byte[count]; + ReadExactly(stream, buffer, 0, count); + return buffer; + } + + /// + /// Reads exactly bytes from into + /// , starting at the byte given by . + /// + /// The stream to read from. + /// The buffer to read data into. + /// The offset within the buffer at which data is first written. + /// The count of bytes to read. 
+ public static void ReadExactly(this Stream stream, byte[] buffer, int offset, int count) + { + // check arguments + if (stream == null) + throw new ArgumentNullException("stream"); + if (buffer == null) + throw new ArgumentNullException("buffer"); + if (offset < 0 || offset > buffer.Length) + throw new ArgumentOutOfRangeException("offset"); + if (count < 0 || buffer.Length - offset < count) + throw new ArgumentOutOfRangeException("count"); + + while (count > 0) + { + // read data + int bytesRead = stream.Read(buffer, offset, count); + + // check for failure to read + if (bytesRead == 0) + throw new EndOfStreamException(); + + // move to next block + offset += bytesRead; + count -= bytesRead; + } + } + } +} diff --git a/src/ContentType.cs b/src/ContentType.cs new file mode 100644 index 00000000..e1382a78 --- /dev/null +++ b/src/ContentType.cs @@ -0,0 +1,40 @@ +using System; +using System.Linq; +using System.Xml; + +namespace Squirrel.Core +{ + internal static class ContentType + { + public static void Merge(XmlDocument doc) + { + var elements = new [] { + Tuple.Create("Default", "diff", "application/octet" ), + Tuple.Create("Default", "exe", "application/octet" ), + Tuple.Create("Default", "dll", "application/octet" ), + Tuple.Create("Default", "shasum", "text/plain" ), + }; + + var typesElement = doc.FirstChild.NextSibling; + if (typesElement.Name.ToLowerInvariant() != "types") { + throw new Exception("Invalid ContentTypes file, expected root node should be 'Types'"); + } + + var existingTypes = typesElement.ChildNodes.OfType() + .Select(k => Tuple.Create(k.Name, + k.GetAttribute("Extension").ToLowerInvariant(), + k.GetAttribute("ContentType").ToLowerInvariant())); + + elements + .Where(x => existingTypes.All(t => t.Item2 != x.Item2.ToLowerInvariant())) + .Select(element => { + var ret = doc.CreateElement(element.Item1, typesElement.NamespaceURI); + var ext = doc.CreateAttribute("Extension"); ext.Value = element.Item2; + var ct = doc.CreateAttribute("ContentType"); ct.Value = element.Item3; + new[] { ext, ct }.ForEach(x => ret.Attributes.Append(x)); + + return ret; + }).ForEach(x => typesElement.AppendChild(x)); + } + } +} diff --git a/src/DeltaPackage.cs b/src/DeltaPackage.cs new file mode 100644 index 00000000..36036fd0 --- /dev/null +++ b/src/DeltaPackage.cs @@ -0,0 +1,263 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.Contracts; +using System.IO; +using System.Linq; +using System.Text; +using System.Text.RegularExpressions; +using Ionic.Zip; +using ReactiveUIMicro; + +namespace Squirrel.Core +{ + public interface IDeltaPackageBuilder + { + ReleasePackage CreateDeltaPackage(ReleasePackage basePackage, ReleasePackage newPackage, string outputFile); + ReleasePackage ApplyDeltaPackage(ReleasePackage basePackage, ReleasePackage deltaPackage, string outputFile); + } + + public class DeltaPackageBuilder : IEnableLogger, IDeltaPackageBuilder + { + public ReleasePackage CreateDeltaPackage(ReleasePackage basePackage, ReleasePackage newPackage, string outputFile) + { + Contract.Requires(basePackage != null); + Contract.Requires(!String.IsNullOrEmpty(outputFile) && !File.Exists(outputFile)); + + if (basePackage.Version > newPackage.Version) { + var message = String.Format( + "You cannot create a delta package based on version {0} as it is a later version than {1}", + basePackage.Version, + newPackage.Version); + throw new InvalidOperationException(message); + } + + if (basePackage.ReleasePackageFile == null) { + throw new ArgumentException("The base package's 
release file is null", "basePackage"); + } + + if (!File.Exists(basePackage.ReleasePackageFile)) { + throw new FileNotFoundException("The base package release does not exist", basePackage.ReleasePackageFile); + } + + if (!File.Exists(newPackage.ReleasePackageFile)) { + throw new FileNotFoundException("The new package release does not exist", newPackage.ReleasePackageFile); + } + + string baseTempPath = null; + string tempPath = null; + + using (Utility.WithTempDirectory(out baseTempPath)) + using (Utility.WithTempDirectory(out tempPath)) { + var baseTempInfo = new DirectoryInfo(baseTempPath); + var tempInfo = new DirectoryInfo(tempPath); + + using (var zf = new ZipFile(basePackage.ReleasePackageFile)) { + zf.ExtractAll(baseTempInfo.FullName); + } + + using (var zf = new ZipFile(newPackage.ReleasePackageFile)) { + zf.ExtractAll(tempInfo.FullName); + } + + // Collect a list of relative paths under 'lib' and map them + // to their full name. We'll use this later to determine in + // the new version of the package whether the file exists or + // not. + var baseLibFiles = baseTempInfo.GetAllFilesRecursively() + .Where(x => x.FullName.ToLowerInvariant().Contains("lib" + Path.DirectorySeparatorChar)) + .ToDictionary(k => k.FullName.Replace(baseTempInfo.FullName, ""), v => v.FullName); + + var newLibDir = tempInfo.GetDirectories().First(x => x.Name.ToLowerInvariant() == "lib"); + + newLibDir.GetAllFilesRecursively() + .ForEach(libFile => createDeltaForSingleFile(libFile, tempInfo, baseLibFiles)); + + ReleasePackage.addDeltaFilesToContentTypes(tempInfo.FullName); + + using (var zf = new ZipFile(outputFile)) { + zf.AddDirectory(tempInfo.FullName); + zf.Save(); + } + } + + return new ReleasePackage(outputFile); + } + + public ReleasePackage ApplyDeltaPackage(ReleasePackage basePackage, ReleasePackage deltaPackage, string outputFile) + { + Contract.Requires(deltaPackage != null); + Contract.Requires(!String.IsNullOrEmpty(outputFile) && !File.Exists(outputFile)); + + string workingPath; + string deltaPath; + + using (Utility.WithTempDirectory(out deltaPath)) + using (Utility.WithTempDirectory(out workingPath)) + using (var deltaZip = new ZipFile(deltaPackage.InputPackageFile)) + using (var baseZip = new ZipFile(basePackage.InputPackageFile)) { + deltaZip.ExtractAll(deltaPath); + baseZip.ExtractAll(workingPath); + + var pathsVisited = new List(); + + var deltaPathRelativePaths = new DirectoryInfo(deltaPath).GetAllFilesRecursively() + .Select(x => x.FullName.Replace(deltaPath + Path.DirectorySeparatorChar, "")) + .ToArray(); + + // Apply all of the .diff files + deltaPathRelativePaths + .Where(x => x.StartsWith("lib", StringComparison.InvariantCultureIgnoreCase)) + .ForEach(file => { + pathsVisited.Add(Regex.Replace(file, @".diff$", "").ToLowerInvariant()); + applyDiffToFile(deltaPath, file, workingPath); + }); + + // Delete all of the files that were in the old package but + // not in the new one. + new DirectoryInfo(workingPath).GetAllFilesRecursively() + .Select(x => x.FullName.Replace(workingPath + Path.DirectorySeparatorChar, "").ToLowerInvariant()) + .Where(x => x.StartsWith("lib", StringComparison.InvariantCultureIgnoreCase) && !pathsVisited.Contains(x)) + .ForEach(x => { + this.Log().Info("{0} was in old package but not in new one, deleting", x); + File.Delete(Path.Combine(workingPath, x)); + }); + + // Update all the files that aren't in 'lib' with the delta + // package's versions (i.e. the nuspec file, etc etc). 
+ deltaPathRelativePaths + .Where(x => !x.StartsWith("lib", StringComparison.InvariantCultureIgnoreCase)) + .ForEach(x => { + this.Log().Info("Updating metadata file: {0}", x); + File.Copy(Path.Combine(deltaPath, x), Path.Combine(workingPath, x), true); + }); + + using (var zf = new ZipFile(outputFile)) { + zf.AddDirectory(workingPath); + zf.Save(); + } + } + + return new ReleasePackage(outputFile); + } + + void createDeltaForSingleFile(FileInfo targetFile, DirectoryInfo workingDirectory, Dictionary baseFileListing) + { + // NB: There are three cases here that we'll handle: + // + // 1. Exists only in new => leave it alone, we'll use it directly. + // 2. Exists in both old and new => write a dummy file so we know + // to keep it. + // 3. Exists in old but changed in new => create a delta file + // + // The fourth case of "Exists only in old => delete it in new" + // is handled when we apply the delta package + var relativePath = targetFile.FullName.Replace(workingDirectory.FullName, ""); + + if (!baseFileListing.ContainsKey(relativePath)) { + this.Log().Info("{0} not found in base package, marking as new", relativePath); + return; + } + + var oldData = File.ReadAllBytes(baseFileListing[relativePath]); + var newData = File.ReadAllBytes(targetFile.FullName); + + if (bytesAreIdentical(oldData, newData)) { + this.Log().Info("{0} hasn't changed, writing dummy file", relativePath); + + File.Create(targetFile.FullName + ".diff").Dispose(); + File.Create(targetFile.FullName + ".shasum").Dispose(); + targetFile.Delete(); + return; + } + + this.Log().Info("Delta patching {0} => {1}", baseFileListing[relativePath], targetFile.FullName); + using (var of = File.Create(targetFile.FullName + ".diff")) { + BinaryPatchUtility.Create(oldData, newData, of); + + var rl = ReleaseEntry.GenerateFromFile(new MemoryStream(newData), targetFile.Name + ".shasum"); + File.WriteAllText(targetFile.FullName + ".shasum", rl.EntryAsString, Encoding.UTF8); + targetFile.Delete(); + } + } + + + void applyDiffToFile(string deltaPath, string relativeFilePath, string workingDirectory) + { + var inputFile = Path.Combine(deltaPath, relativeFilePath); + var finalTarget = Path.Combine(workingDirectory, Regex.Replace(relativeFilePath, @".diff$", "")); + + var tempTargetFile = Path.GetTempFileName(); + + // NB: Zero-length diffs indicate the file hasn't actually changed + if (new FileInfo(inputFile).Length == 0) { + this.Log().Info("{0} exists unchanged, skipping", relativeFilePath); + return; + } + + if (relativeFilePath.EndsWith(".diff", StringComparison.InvariantCultureIgnoreCase)) { + using (var of = File.OpenWrite(tempTargetFile)) + using (var inf = File.OpenRead(finalTarget)) { + this.Log().Info("Applying Diff to {0}", relativeFilePath); + BinaryPatchUtility.Apply(inf, () => File.OpenRead(inputFile), of); + } + + try { + verifyPatchedFile(relativeFilePath, inputFile, tempTargetFile); + } catch (Exception) { + File.Delete(tempTargetFile); + throw; + } + } else { + using (var of = File.OpenWrite(tempTargetFile)) + using (var inf = File.OpenRead(inputFile)) { + this.Log().Info("Adding new file: {0}", relativeFilePath); + inf.CopyTo(of); + } + } + + if (File.Exists(finalTarget)) File.Delete(finalTarget); + + var targetPath = Directory.GetParent(finalTarget); + if (!targetPath.Exists) targetPath.Create(); + + File.Move(tempTargetFile, finalTarget); + } + + void verifyPatchedFile(string relativeFilePath, string inputFile, string tempTargetFile) + { + var shaFile = Regex.Replace(inputFile, @"\.diff$", ".shasum"); + var 
expectedReleaseEntry = ReleaseEntry.ParseReleaseEntry(File.ReadAllText(shaFile, Encoding.UTF8)); + var actualReleaseEntry = ReleaseEntry.GenerateFromFile(tempTargetFile); + + if (expectedReleaseEntry.Filesize != actualReleaseEntry.Filesize) { + this.Log().Warn("Patched file {0} has incorrect size, expected {1}, got {2}", relativeFilePath, + expectedReleaseEntry.Filesize, actualReleaseEntry.Filesize); + throw new ChecksumFailedException() {Filename = relativeFilePath}; + } + + if (expectedReleaseEntry.SHA1 != actualReleaseEntry.SHA1) { + this.Log().Warn("Patched file {0} has incorrect SHA1, expected {1}, got {2}", relativeFilePath, + expectedReleaseEntry.SHA1, actualReleaseEntry.SHA1); + throw new ChecksumFailedException() {Filename = relativeFilePath}; + } + } + + bool bytesAreIdentical(byte[] oldData, byte[] newData) + { + if (oldData == null || newData == null) { + return oldData == newData; + } + if (oldData.LongLength != newData.LongLength) { + return false; + } + + for(long i = 0; i < newData.LongLength; i++) { + if (oldData[i] != newData[i]) { + return false; + } + } + + return true; + } + } +} diff --git a/src/Properties/AssemblyInfo.cs b/src/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..9145fd49 --- /dev/null +++ b/src/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Squirrel")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("Squirrel")] +[assembly: AssemblyCopyright("Copyright © 2014")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("3c25a7f9-3e99-4556-aba3-f820c74bb4da")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/src/ReleaseEntry.cs b/src/ReleaseEntry.cs new file mode 100644 index 00000000..98879132 --- /dev/null +++ b/src/ReleaseEntry.cs @@ -0,0 +1,200 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.Contracts; +using System.IO; +using System.Linq; +using System.Reactive.Linq; +using System.Text; +using System.Text.RegularExpressions; +using NuGet; +using ReactiveUIMicro; +using Squirrel.Core.Extensions; +using System.Runtime.Serialization; + +namespace Squirrel.Core +{ + public interface IReleaseEntry + { + string SHA1 { get; } + string Filename { get; } + long Filesize { get; } + bool IsDelta { get; } + string EntryAsString { get; } + Version Version { get; } + string PackageName { get; } + + string GetReleaseNotes(string packageDirectory); + } + + [DataContract] + public class ReleaseEntry : IEnableLogger, IReleaseEntry + { + [DataMember] public string SHA1 { get; protected set; } + [DataMember] public string Filename { get; protected set; } + [DataMember] public long Filesize { get; protected set; } + [DataMember] public bool IsDelta { get; protected set; } + + protected ReleaseEntry(string sha1, string filename, long filesize, bool isDelta) + { + Contract.Requires(sha1 != null && sha1.Length == 40); + Contract.Requires(filename != null); + Contract.Requires(filename.Contains(Path.DirectorySeparatorChar) == false); + Contract.Requires(filesize > 0); + + SHA1 = sha1; Filename = filename; Filesize = filesize; IsDelta = isDelta; + } + + [IgnoreDataMember] + public string EntryAsString { + get { return String.Format("{0} {1} {2}", SHA1, Filename, Filesize); } + } + + [IgnoreDataMember] + public Version Version { get { return Filename.ToVersion(); } } + + [IgnoreDataMember] + public string PackageName { + get { + return Filename.Substring(0, Filename.IndexOfAny(new[] { '-', '.' 
})); + } + } + + public string GetReleaseNotes(string packageDirectory) + { + var zp = new ZipPackage(Path.Combine(packageDirectory, Filename)); + + var t = zp.Id; + + if (String.IsNullOrWhiteSpace(zp.ReleaseNotes)) + throw new Exception(String.Format("Invalid 'ReleaseNotes' value in nuspec file at '{0}'", Path.Combine(packageDirectory, Filename))); + + return zp.ReleaseNotes; + } + + static readonly Regex entryRegex = new Regex(@"^([0-9a-fA-F]{40})\s+(\S+)\s+(\d+)[\r]*$"); + static readonly Regex commentRegex = new Regex(@"#.*$"); + public static ReleaseEntry ParseReleaseEntry(string entry) + { + Contract.Requires(entry != null); + + entry = commentRegex.Replace(entry, ""); + if (String.IsNullOrWhiteSpace(entry)) { + return null; + } + + var m = entryRegex.Match(entry); + if (!m.Success) { + throw new Exception("Invalid release entry: " + entry); + } + + if (m.Groups.Count != 4) { + throw new Exception("Invalid release entry: " + entry); + } + + long size = Int64.Parse(m.Groups[3].Value); + bool isDelta = filenameIsDeltaFile(m.Groups[2].Value); + return new ReleaseEntry(m.Groups[1].Value, m.Groups[2].Value, size, isDelta); + } + + public static IEnumerable ParseReleaseFile(string fileContents) + { + if (String.IsNullOrEmpty(fileContents)) { + return new ReleaseEntry[0]; + } + + var ret = fileContents.Split('\n') + .Where(x => !String.IsNullOrWhiteSpace(x)) + .Select(ParseReleaseEntry) + .Where(x => x != null) + .ToArray(); + + return ret.Any(x => x == null) ? null : ret; + } + + public static void WriteReleaseFile(IEnumerable releaseEntries, Stream stream) + { + Contract.Requires(releaseEntries != null && releaseEntries.Any()); + Contract.Requires(stream != null); + + using (var sw = new StreamWriter(stream, Encoding.UTF8)) { + sw.Write(String.Join("\n", releaseEntries + .OrderBy(x => x.Version) + .ThenByDescending(x => x.IsDelta) + .Select(x => x.EntryAsString))); + } + } + + public static void WriteReleaseFile(IEnumerable releaseEntries, string path) + { + Contract.Requires(releaseEntries != null && releaseEntries.Any()); + Contract.Requires(!String.IsNullOrEmpty(path)); + + using (var f = File.OpenWrite(path)) { + WriteReleaseFile(releaseEntries, f); + } + } + + public static ReleaseEntry GenerateFromFile(Stream file, string filename) + { + Contract.Requires(file != null && file.CanRead); + Contract.Requires(!String.IsNullOrEmpty(filename)); + + var hash = Utility.CalculateStreamSHA1(file); + return new ReleaseEntry(hash, filename, file.Length, filenameIsDeltaFile(filename)); + } + + public static ReleaseEntry GenerateFromFile(string path) + { + using (var inf = File.OpenRead(path)) { + return GenerateFromFile(inf, Path.GetFileName(path)); + } + } + + public static void BuildReleasesFile(string releasePackagesDir, IFileSystemFactory fileSystemFactory = null) + { + fileSystemFactory = fileSystemFactory ?? 
+
+        public static void BuildReleasesFile(string releasePackagesDir, IFileSystemFactory fileSystemFactory = null)
+        {
+            fileSystemFactory = fileSystemFactory ?? AnonFileSystem.Default;
+            var packagesDir = fileSystemFactory.GetDirectoryInfo(releasePackagesDir);
+
+            // Generate release entries for all of the local packages
+            var entries = packagesDir.GetFiles("*.nupkg").MapReduce(x => Observable.Start(() => {
+                using (var file = x.OpenRead()) {
+                    return GenerateFromFile(file, x.Name);
+                }
+            }, RxApp.TaskpoolScheduler)).First();
+
+            // Write the new RELEASES file to a temp file then move it into place
+            var tempFile = fileSystemFactory.CreateTempFile();
+            try {
+                if (entries.Count > 0) WriteReleaseFile(entries, tempFile.Item2);
+            } finally {
+                tempFile.Item2.Dispose();
+            }
+
+            var target = Path.Combine(packagesDir.FullName, "RELEASES");
+            if (File.Exists(target)) {
+                File.Delete(target);
+            }
+
+            fileSystemFactory.GetFileInfo(tempFile.Item1).MoveTo(target);
+        }
+
+        static bool filenameIsDeltaFile(string filename)
+        {
+            return filename.EndsWith("-delta.nupkg", StringComparison.InvariantCultureIgnoreCase);
+        }
+
+        public static ReleasePackage GetPreviousRelease(IEnumerable<ReleaseEntry> releaseEntries, IReleasePackage package, string targetDir)
+        {
+            if (releaseEntries == null || !releaseEntries.Any())
+                return null;
+
+            return releaseEntries
+                .Where(x => x.IsDelta == false)
+                .Where(x => x.Version < package.ToVersion())
+                .OrderByDescending(x => x.Version)
+                .Select(x => new ReleasePackage(Path.Combine(targetDir, x.Filename), true))
+                .FirstOrDefault();
+        }
+    }
+}
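+
+// NB: illustrative usage sketch, not part of the original file. It shows how the members
+// above are meant to fit together; the directory path is hypothetical:
+//
+//     // Regenerate the RELEASES file for a folder of .nupkg files...
+//     ReleaseEntry.BuildReleasesFile(@"C:\MyApp\Releases");
+//
+//     // ...then read it back and pick the newest full package.
+//     var entries = ReleaseEntry.ParseReleaseFile(
+//         File.ReadAllText(Path.Combine(@"C:\MyApp\Releases", "RELEASES"), Encoding.UTF8));
+//     var newest = entries.Where(x => !x.IsDelta).OrderByDescending(x => x.Version).First();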
diff --git a/src/ReleasePackage.cs b/src/ReleasePackage.cs
new file mode 100644
index 00000000..4b36e1c0
--- /dev/null
+++ b/src/ReleasePackage.cs
@@ -0,0 +1,281 @@
+using System;
+using System.Collections.Generic;
+using System.ComponentModel.Design;
+using System.Diagnostics.Contracts;
+using System.Globalization;
+using System.IO;
+using System.Linq;
+using System.Runtime.Versioning;
+using System.Text;
+using System.Text.RegularExpressions;
+using System.Xml;
+using Ionic.Zip;
+using NuGet;
+using ReactiveUIMicro;
+using Squirrel.Core.Extensions;
+
+namespace Squirrel.Core
+{
+    internal static class FrameworkTargetVersion
+    {
+        public static FrameworkName Net40 = new FrameworkName(".NETFramework,Version=v4.0");
+        public static FrameworkName Net45 = new FrameworkName(".NETFramework,Version=v4.5");
+    }
+
+    public interface IReleasePackage
+    {
+        string InputPackageFile { get; }
+        string ReleasePackageFile { get; }
+        string SuggestedReleaseFileName { get; }
+
+        string CreateReleasePackage(string outputFile, string packagesRootDir = null, Func<string, string> releaseNotesProcessor = null);
+    }
+
+    public static class VersionComparer
+    {
+        public static bool Matches(IVersionSpec versionSpec, SemanticVersion version)
+        {
+            if (versionSpec == null)
+                return true; // I CAN'T DEAL WITH THIS
+
+            bool minVersion;
+            if (versionSpec.MinVersion == null) {
+                minVersion = true; // no precondition? LET'S DO IT
+            } else if (versionSpec.IsMinInclusive) {
+                minVersion = version >= versionSpec.MinVersion;
+            } else {
+                minVersion = version > versionSpec.MinVersion;
+            }
+
+            bool maxVersion;
+            if (versionSpec.MaxVersion == null) {
+                maxVersion = true; // no precondition? LET'S DO IT
+            } else if (versionSpec.IsMaxInclusive) {
+                maxVersion = version <= versionSpec.MaxVersion;
+            } else {
+                maxVersion = version < versionSpec.MaxVersion;
+            }
+
+            return maxVersion && minVersion;
+        }
+    }
+
+    public class ReleasePackage : IEnableLogger, IReleasePackage
+    {
+        IEnumerable<IPackage> localPackageCache;
+
+        public ReleasePackage(string inputPackageFile, bool isReleasePackage = false)
+        {
+            InputPackageFile = inputPackageFile;
+
+            if (isReleasePackage) {
+                ReleasePackageFile = inputPackageFile;
+            }
+        }
+
+        public string InputPackageFile { get; protected set; }
+        public string ReleasePackageFile { get; protected set; }
+
+        public string SuggestedReleaseFileName {
+            get {
+                var zp = new ZipPackage(InputPackageFile);
+                return String.Format("{0}-{1}-full.nupkg", zp.Id, zp.Version);
+            }
+        }
+
+        public Version Version { get { return InputPackageFile.ToVersion(); } }
+
+        public string CreateReleasePackage(string outputFile, string packagesRootDir = null, Func<string, string> releaseNotesProcessor = null)
+        {
+            Contract.Requires(!String.IsNullOrEmpty(outputFile));
+
+            if (ReleasePackageFile != null) {
+                return ReleasePackageFile;
+            }
+
+            var package = new ZipPackage(InputPackageFile);
+
+            // we can tell from here what platform(s) the package targets
+            // but given this is a simple package we only
+            // ever expect one entry here (crash hard otherwise)
+            var frameworks = package.GetSupportedFrameworks();
+            if (frameworks.Count() > 1) {
+
+                var platforms = frameworks
+                    .Aggregate(new StringBuilder(), (sb, f) => sb.Append(f.ToString() + "; "));
+
+                throw new InvalidOperationException(String.Format(
+                    "The input package file {0} targets multiple platforms - {1} - and cannot be transformed into a release package.", InputPackageFile, platforms));
+            }
+
+            var targetFramework = frameworks.Single();
+
+            // Recursively walk the dependency tree and extract all of the
+            // dependent packages into a temporary directory
+            var dependencies = findAllDependentPackages(
+                package,
+                new LocalPackageRepository(packagesRootDir),
+                frameworkName: targetFramework);
+
+            string tempPath = null;
+
+            using (Utility.WithTempDirectory(out tempPath)) {
+                var tempDir = new DirectoryInfo(tempPath);
+
+                using (var zf = new ZipFile(InputPackageFile)) {
+                    zf.ExtractAll(tempPath);
+                }
+
+                extractDependentPackages(dependencies, tempDir, targetFramework);
+
+                var specPath = tempDir.GetFiles("*.nuspec").First().FullName;
+
+                removeDependenciesFromPackageSpec(specPath);
+                removeDeveloperDocumentation(tempDir);
+
+                if (releaseNotesProcessor != null) {
+                    renderReleaseNotesMarkdown(specPath, releaseNotesProcessor);
+                }
+
+                addDeltaFilesToContentTypes(tempDir.FullName);
+
+                using (var zf = new ZipFile(outputFile)) {
+                    zf.AddDirectory(tempPath);
+                    zf.Save();
+                }
+
+                ReleasePackageFile = outputFile;
+                return ReleasePackageFile;
+            }
+        }
+
+        void extractDependentPackages(IEnumerable<IPackage> dependencies, DirectoryInfo tempPath, FrameworkName framework)
+        {
+            dependencies.ForEach(pkg => {
+                this.Log().Info("Scanning {0}", pkg.Id);
+
+                pkg.GetLibFiles().ForEach(file => {
+                    var outPath = new FileInfo(Path.Combine(tempPath.FullName, file.Path));
+
+                    if (!VersionUtility.IsCompatible(framework, new[] { file.TargetFramework }))
+                    {
+                        this.Log().Info("Ignoring {0} as the target framework is not compatible", outPath);
+                        return;
+                    }
+
+                    Directory.CreateDirectory(outPath.Directory.FullName);
+
+                    using (var of = File.Create(outPath.FullName)) {
+                        this.Log().Info("Writing {0} to {1}", file.Path, outPath);
+                        file.GetStream().CopyTo(of);
+                    }
+                });
+            });
+        }
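+
+        // NB: illustrative usage sketch, not part of the original file; the package file
+        // names and packages directory are hypothetical:
+        //
+        //     var rp = new ReleasePackage("MyCoolApp.1.1.0.nupkg");
+        //     var fullPackage = rp.CreateReleasePackage(
+        //         "MyCoolApp-1.1.0-full.nupkg", packagesRootDir: @"C:\MyApp\packages");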
+
+        void removeDeveloperDocumentation(DirectoryInfo expandedRepoPath)
+        {
+            expandedRepoPath.GetAllFilesRecursively()
+                .Where(x => x.Name.EndsWith(".dll", true, CultureInfo.InvariantCulture))
+                .Select(x => new FileInfo(x.FullName.ToLowerInvariant().Replace(".dll", ".xml")))
+                .Where(x => x.Exists)
+                .ForEach(x => x.Delete());
+        }
+
+        void renderReleaseNotesMarkdown(string specPath, Func<string, string> releaseNotesProcessor)
+        {
+            var doc = new XmlDocument();
+            doc.Load(specPath);
+
+            // XXX: This code looks full tart
+            var metadata = doc.DocumentElement.ChildNodes
+                .OfType<XmlElement>()
+                .First(x => x.Name.ToLowerInvariant() == "metadata");
+
+            var releaseNotes = metadata.ChildNodes
+                .OfType<XmlElement>()
+                .FirstOrDefault(x => x.Name.ToLowerInvariant() == "releasenotes");
+
+            if (releaseNotes == null) {
+                this.Log().Info("No release notes found in {0}", specPath);
+                return;
+            }
+
+            releaseNotes.InnerText = String.Format("<![CDATA[\n{0}\n]]>",
+                releaseNotesProcessor(releaseNotes.InnerText));
+
+            doc.Save(specPath);
+        }
+
+        void removeDependenciesFromPackageSpec(string specPath)
+        {
+            var xdoc = new XmlDocument();
+            xdoc.Load(specPath);
+
+            var metadata = xdoc.DocumentElement.FirstChild;
+            var dependenciesNode = metadata.ChildNodes.OfType<XmlElement>().FirstOrDefault(x => x.Name.ToLowerInvariant() == "dependencies");
+            if (dependenciesNode != null) {
+                metadata.RemoveChild(dependenciesNode);
+            }
+
+            xdoc.Save(specPath);
+        }
+
+        IEnumerable<IPackage> findAllDependentPackages(
+            IPackage package = null,
+            IPackageRepository packageRepository = null,
+            HashSet<string> packageCache = null,
+            FrameworkName frameworkName = null)
+        {
+            package = package ?? new ZipPackage(InputPackageFile);
+            packageCache = packageCache ?? new HashSet<string>();
+
+            var deps = package.DependencySets
+                .Where(x => x.TargetFramework == null
+                         || x.TargetFramework == frameworkName)
+                .SelectMany(x => x.Dependencies);
+
+            return deps.SelectMany(dependency => {
+                var ret = matchPackage(packageRepository, dependency.Id, dependency.VersionSpec);
+
+                if (ret == null) {
+                    var message = String.Format("Couldn't find file for package in {1}: {0}", dependency.Id, packageRepository.Source);
+                    this.Log().Error(message);
+                    throw new Exception(message);
+                }
+
+                if (packageCache.Contains(ret.GetFullName())) {
+                    return Enumerable.Empty<IPackage>();
+                }
+
+                packageCache.Add(ret.GetFullName());
+
+                return findAllDependentPackages(ret, packageRepository, packageCache, frameworkName).StartWith(ret).Distinct(y => y.GetFullName());
+            }).ToArray();
+        }
+
+        IPackage matchPackage(IPackageRepository packageRepository, string id, IVersionSpec version)
+        {
+            return packageRepository.FindPackagesById(id).FirstOrDefault(x => VersionComparer.Matches(version, x.Version));
+        }
+
+        static internal void addDeltaFilesToContentTypes(string rootDirectory)
+        {
+            var doc = new XmlDocument();
+            var path = Path.Combine(rootDirectory, "[Content_Types].xml");
+            doc.Load(path);
+
+            ContentType.Merge(doc);
+
+            using (var sw = new StreamWriter(path, false, Encoding.UTF8)) {
+                doc.Save(sw);
+            }
+        }
+    }
+
+    public class ChecksumFailedException : Exception
+    {
+        public string Filename { get; set; }
+    }
+}
diff --git a/src/Squirrel.csproj b/src/Squirrel.csproj
new file mode 100644
index 00000000..5a461918
--- /dev/null
+++ b/src/Squirrel.csproj
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="12.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+    <ProjectGuid>{1436E22A-FE3C-4D68-9A85-9E74DF2E6A92}</ProjectGuid>
+    <OutputType>Library</OutputType>
+    <AppDesignerFolder>Properties</AppDesignerFolder>
+    <RootNamespace>Squirrel</RootNamespace>
+    <AssemblyName>Squirrel</AssemblyName>
+    <TargetFrameworkVersion>v4.5</TargetFrameworkVersion>
+    <FileAlignment>512</FileAlignment>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>bin\Debug\</OutputPath>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <DebugType>pdbonly</DebugType>
+    <Optimize>true</Optimize>
+    <OutputPath>bin\Release\</OutputPath>
+    <DefineConstants>TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <!-- reference and Compile item groups were stripped from this dump and are not recoverable -->
+  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+</Project>
\ No newline at end of file
diff --git a/src/Utility.cs b/src/Utility.cs
new file mode 100644
index 00000000..a9e6cb26
--- /dev/null
+++ b/src/Utility.cs
@@ -0,0 +1,334 @@
+using System;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Diagnostics.Contracts;
+using System.IO;
+using System.Linq;
+using System.Reactive;
+using System.Reactive.Concurrency;
+using System.Reactive.Disposables;
+using System.Reactive.Linq;
+using System.Reactive.Subjects;
+using System.Reflection;
+using System.Runtime.InteropServices;
+using System.Security.AccessControl;
+using System.Security.Cryptography;
+using System.Security.Principal;
+using System.Threading;
+using ReactiveUIMicro;
+using System.Text;
+
+namespace Squirrel.Core
+{
+    public static class Utility
+    {
+        public static IEnumerable<FileInfo> GetAllFilesRecursively(this DirectoryInfo rootPath)
+        {
+            Contract.Requires(rootPath != null);
+
+            return rootPath.GetDirectories()
+                .SelectMany(GetAllFilesRecursively)
+                .Concat(rootPath.GetFiles());
+        }
+
+        public static IEnumerable<string> GetAllFilePathsRecursively(string rootPath)
+        {
+            Contract.Requires(rootPath != null);
+
+            return Directory.GetDirectories(rootPath)
+                .SelectMany(GetAllFilePathsRecursively)
+                .Concat(Directory.GetFiles(rootPath));
+        }
+
+        public static string CalculateFileSHA1(string filePath)
+        {
+            Contract.Requires(filePath != null);
+
+            using (var stream = File.OpenRead(filePath)) {
+                return CalculateStreamSHA1(stream);
+            }
+        }
+
+        public static string CalculateStreamSHA1(Stream file)
+        {
+            Contract.Requires(file != null && file.CanRead);
+
+            using (var sha1 = SHA1.Create()) {
+                return BitConverter.ToString(sha1.ComputeHash(file)).Replace("-", String.Empty);
+            }
+        }
+
+        public static IObservable<Unit> CopyToAsync(string from, string to)
+        {
+            Contract.Requires(!String.IsNullOrEmpty(from) && File.Exists(from));
+            Contract.Requires(!String.IsNullOrEmpty(to));
+
+            if (!File.Exists(from)) {
+                Log().Warn("The file {0} does not exist", from);
+
+                // TODO: should we fail this operation?
+                return Observable.Return(Unit.Default);
+            }
+
+            // XXX: SafeCopy
+            return Observable.Start(() => File.Copy(from, to, true), RxApp.TaskpoolScheduler);
+        }
+
+        public static void Retry(this Action block, int retries = 2)
+        {
+            Contract.Requires(retries > 0);
+
+            Func<object> thunk = () => {
+                block();
+                return null;
+            };
+
+            thunk.Retry(retries);
+        }
+
+        public static T Retry<T>(this Func<T> block, int retries = 2)
+        {
+            Contract.Requires(retries > 0);
+
+            while (true) {
+                try {
+                    T ret = block();
+                    return ret;
+                } catch (Exception) {
+                    if (retries == 0) {
+                        throw;
+                    }
+
+                    retries--;
+                    Thread.Sleep(250);
+                }
+            }
+        }
+
+        public static IObservable<IList<TRet>> MapReduce<T, TRet>(this IObservable<T> This, Func<T, IObservable<TRet>> selector, int degreeOfParallelism = 4)
+        {
+            return This.Select(x => Observable.Defer(() => selector(x))).Merge(degreeOfParallelism).ToList();
+        }
+
+        public static IObservable<IList<TRet>> MapReduce<T, TRet>(this IEnumerable<T> This, Func<T, IObservable<TRet>> selector, int degreeOfParallelism = 4)
+        {
+            return This.ToObservable().Select(x => Observable.Defer(() => selector(x))).Merge(degreeOfParallelism).ToList();
+        }
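+
+        // NB: illustrative usage sketch, not part of the original file; the paths are
+        // hypothetical:
+        //
+        //     // Retry a flaky file operation a few times before giving up.
+        //     new Action(() => File.Delete(@"C:\temp\stale.lock")).Retry(3);
+        //
+        //     // The generic overload returns the block's result.
+        //     var sha1 = new Func<string>(() => CalculateFileSHA1(@"C:\temp\app.nupkg")).Retry();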
+
+        static string directoryChars;
+        public static IDisposable WithTempDirectory(out string path)
+        {
+            var di = new DirectoryInfo(Environment.GetEnvironmentVariable("SQUIRREL_TEMP") ?? Environment.GetEnvironmentVariable("TEMP") ?? "");
+            if (!di.Exists) {
+                throw new Exception("%TEMP% isn't defined, go set it");
+            }
+
+            var tempDir = default(DirectoryInfo);
+
+            directoryChars = directoryChars ?? (
+                "abcdefghijklmnopqrstuvwxyz" +
+                Enumerable.Range(0x4E00, 0x9FCC - 0x4E00)  // CJK UNIFIED IDEOGRAPHS
+                    .Aggregate(new StringBuilder(), (acc, x) => { acc.Append(Char.ConvertFromUtf32(x)); return acc; })
+                    .ToString());
+
+            foreach (var c in directoryChars) {
+                var target = Path.Combine(di.FullName, c.ToString());
+
+                if (!File.Exists(target) && !Directory.Exists(target)) {
+                    Directory.CreateDirectory(target);
+                    tempDir = new DirectoryInfo(target);
+                    break;
+                }
+            }
+
+            path = tempDir.FullName;
+
+            return Disposable.Create(() =>
+                DeleteDirectory(tempDir.FullName).Wait());
+        }
+
+        public static IObservable<Unit> DeleteDirectory(string directoryPath, IScheduler scheduler = null)
+        {
+            Contract.Requires(!String.IsNullOrEmpty(directoryPath));
+
+            scheduler = scheduler ?? RxApp.TaskpoolScheduler;
+
+            Log().Info("Starting to delete folder: {0}", directoryPath);
+
+            if (!Directory.Exists(directoryPath)) {
+                Log().Warn("DeleteDirectory: does not exist - {0}", directoryPath);
+                return Observable.Return(Unit.Default);
+            }
+
+            // From http://stackoverflow.com/questions/329355/cannot-delete-directory-with-directory-deletepath-true/329502#329502
+            var files = new string[0];
+            try {
+                files = Directory.GetFiles(directoryPath);
+            } catch (UnauthorizedAccessException ex) {
+                var message = String.Format("The files inside {0} could not be read", directoryPath);
+                Log().Warn(message, ex);
+            }
+
+            var dirs = new string[0];
+            try {
+                dirs = Directory.GetDirectories(directoryPath);
+            } catch (UnauthorizedAccessException ex) {
+                var message = String.Format("The directories inside {0} could not be read", directoryPath);
+                Log().Warn(message, ex);
+            }
+
+            var fileOperations = files.MapReduce(file =>
+                    Observable.Start(() => {
+                        Log().Debug("Now deleting file: {0}", file);
+                        File.SetAttributes(file, FileAttributes.Normal);
+                        File.Delete(file);
+                    }, scheduler))
+                .Select(_ => Unit.Default);
+
+            var directoryOperations =
+                dirs.MapReduce(dir => DeleteDirectory(dir, scheduler)
+                    .Retry(3))
+                .Select(_ => Unit.Default);
+
+            return fileOperations
+                .Merge(directoryOperations, scheduler)
+                .ToList()   // still feeling a bit icky
+                .Select(_ => {
+                    Log().Debug("Now deleting folder: {0}", directoryPath);
+                    File.SetAttributes(directoryPath, FileAttributes.Normal);
+
+                    try {
+                        Directory.Delete(directoryPath, false);
+                    } catch (Exception ex) {
+                        var message = String.Format("DeleteDirectory: could not delete - {0}", directoryPath);
+                        Log().ErrorException(message, ex);
+                    }
+                    return Unit.Default;
+                });
+        }
+
+        public static Tuple<string, Stream> CreateTempFile()
+        {
+            var path = Path.GetTempFileName();
+            return Tuple.Create(path, (Stream) File.OpenWrite(path));
+        }
+
+        static TAcc scan<T, TAcc>(this IEnumerable<T> This, TAcc initialValue, Func<TAcc, T, TAcc> accFunc)
+        {
+            TAcc acc = initialValue;
+
+            foreach (var x in This)
+            {
+                acc = accFunc(acc, x);
+            }
+
+            return acc;
+        }
+
+        public static void DeleteDirectoryAtNextReboot(string directoryPath)
+        {
+            var di = new DirectoryInfo(directoryPath);
+
+            if (!di.Exists) {
+                Log().Warn("DeleteDirectoryAtNextReboot: does not exist - {0}", directoryPath);
+                return;
+            }
+
+            // NB: MoveFileEx blows up if you're a non-admin, so you always need a backup plan
+            di.GetFiles().ForEach(x => safeDeleteFileAtNextReboot(x.FullName));
+            di.GetDirectories().ForEach(x => DeleteDirectoryAtNextReboot(x.FullName));
+
+            safeDeleteFileAtNextReboot(directoryPath);
+        }
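+
+        // NB: illustrative usage sketch, not part of the original file:
+        //
+        //     string tempPath;
+        //     using (Utility.WithTempDirectory(out tempPath)) {
+        //         // work inside tempPath; the directory is deleted (via DeleteDirectory)
+        //         // when the returned IDisposable is disposed
+        //     }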
+
+        static void safeDeleteFileAtNextReboot(string name)
+        {
+            if (MoveFileEx(name, null, MoveFileFlags.MOVEFILE_DELAY_UNTIL_REBOOT)) return;
+
+            // thank you, http://www.pinvoke.net/default.aspx/coredll.getlasterror
+            var lastError = Marshal.GetLastWin32Error();
+
+            Log().Error("safeDeleteFileAtNextReboot: failed - {0} - {1}", name, lastError);
+        }
+
+        static IRxUIFullLogger Log()
+        {
+            return LogManager.GetLogger(typeof(Utility));
+        }
+
+        [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
+        static extern bool MoveFileEx(string lpExistingFileName, string lpNewFileName, MoveFileFlags dwFlags);
+
+        [Flags]
+        enum MoveFileFlags
+        {
+            MOVEFILE_REPLACE_EXISTING = 0x00000001,
+            MOVEFILE_COPY_ALLOWED = 0x00000002,
+            MOVEFILE_DELAY_UNTIL_REBOOT = 0x00000004,
+            MOVEFILE_WRITE_THROUGH = 0x00000008,
+            MOVEFILE_CREATE_HARDLINK = 0x00000010,
+            MOVEFILE_FAIL_IF_NOT_TRACKABLE = 0x00000020
+        }
+    }
+
+    public sealed class SingleGlobalInstance : IDisposable
+    {
+        readonly static object gate = 42;
+        bool HasHandle = false;
+        Mutex mutex;
+        EventLoopScheduler lockScheduler = new EventLoopScheduler();
+
+        public SingleGlobalInstance(string key, int timeOut)
+        {
+            if (RxApp.InUnitTestRunner()) {
+                HasHandle = Observable.Start(() => Monitor.TryEnter(gate, timeOut), lockScheduler).First();
+
+                if (HasHandle == false)
+                    throw new TimeoutException("Timeout waiting for exclusive access on SingleInstance");
+                return;
+            }
+
+            initMutex(key);
+            try
+            {
+                if (timeOut <= 0)
+                    HasHandle = Observable.Start(() => mutex.WaitOne(Timeout.Infinite, false), lockScheduler).First();
+                else
+                    HasHandle = Observable.Start(() => mutex.WaitOne(timeOut, false), lockScheduler).First();
+
+                if (HasHandle == false)
+                    throw new TimeoutException("Timeout waiting for exclusive access on SingleInstance");
+            }
+            catch (AbandonedMutexException)
+            {
+                HasHandle = true;
+            }
+        }
+
+        private void initMutex(string key)
+        {
+            string mutexId = string.Format("Global\\{{{0}}}", key);
+            mutex = new Mutex(false, mutexId);
+
+            var allowEveryoneRule = new MutexAccessRule(new SecurityIdentifier(WellKnownSidType.WorldSid, null), MutexRights.FullControl, AccessControlType.Allow);
+            var securitySettings = new MutexSecurity();
+            securitySettings.AddAccessRule(allowEveryoneRule);
+            mutex.SetAccessControl(securitySettings);
+        }
+
+        public void Dispose()
+        {
+            if (HasHandle && RxApp.InUnitTestRunner()) {
+                Observable.Start(() => Monitor.Exit(gate), lockScheduler).First();
+                HasHandle = false;
+            }
+
+            if (HasHandle && mutex != null) {
+                Observable.Start(() => mutex.ReleaseMutex(), lockScheduler).First();
+                HasHandle = false;
+            }
+
+            lockScheduler.Dispose();
+        }
+    }
+}
diff --git a/src/packages.config b/src/packages.config
new file mode 100644
index 00000000..61672b2d
--- /dev/null
+++ b/src/packages.config
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<packages>
+  <!-- the individual package entries were stripped from this dump -->
+  <!-- and their ids/versions are not recoverable -->
+</packages>
\ No newline at end of file