Integrate all the latest from visualfsharp\master #651

Merged · 10 commits · Oct 13, 2016

src/absil/il.fs — empty file, mode changed 100755 → 100644

12 changes: 7 additions & 5 deletions src/absil/ilwrite.fs
@@ -3536,7 +3536,7 @@ let writeDirectory os dict =

let writeBytes (os: BinaryWriter) (chunk:byte[]) = os.Write(chunk,0,chunk.Length)

let writeBinaryAndReportMappings (outfile, ilg, pdbfile: string option, signer: ILStrongNameSigner option, portablePDB, embeddedPDB,
let writeBinaryAndReportMappings (outfile, ilg, pdbfile: string option, signer: ILStrongNameSigner option, portablePDB, embeddedPDB, embedAllSource, embedSourceList,
fixupOverlappingSequencePoints, emitTailcalls, showTimes, dumpDebugInfo) modul noDebugData =
// Store the public key from the signer into the manifest. This means it will be written
// to the binary and also acts as an indicator to leave space for delay sign
@@ -3690,7 +3690,7 @@ let writeBinaryAndReportMappings (outfile, ilg, pdbfile: string option, signer:
let pdbOpt =
match portablePDB with
| true ->
let (uncompressedLength, contentId, stream) as pdbStream = generatePortablePdb fixupOverlappingSequencePoints showTimes pdbData
let (uncompressedLength, contentId, stream) as pdbStream = generatePortablePdb fixupOverlappingSequencePoints embedAllSource embedSourceList showTimes pdbData
if embeddedPDB then Some (compressPortablePdbStream uncompressedLength contentId stream)
else Some (pdbStream)
| _ -> None
@@ -4260,13 +4260,15 @@ type options =
pdbfile: string option
portablePDB: bool
embeddedPDB: bool
embedAllSource: bool
embedSourceList: string list
signer: ILStrongNameSigner option
fixupOverlappingSequencePoints: bool
emitTailcalls : bool
showTimes: bool
dumpDebugInfo:bool }

let WriteILBinary (outfile, (args: options), modul, noDebugData) =
ignore (writeBinaryAndReportMappings (outfile, args.ilg, args.pdbfile, args.signer, args.portablePDB, args.embeddedPDB,
args.fixupOverlappingSequencePoints, args.emitTailcalls, args.showTimes,
args.dumpDebugInfo) modul noDebugData)
ignore (writeBinaryAndReportMappings (outfile, args.ilg, args.pdbfile, args.signer, args.portablePDB, args.embeddedPDB,
args.embedAllSource, args.embedSourceList, args.fixupOverlappingSequencePoints,
args.emitTailcalls, args.showTimes, args.dumpDebugInfo) modul noDebugData)
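
For illustration, a minimal sketch of a call site using the extended options record (not taken from this PR; existingOptions, ilModule, and the paths are placeholders):

// Hypothetical: enable source embedding for a single file via record copy-and-update.
let optsWithSource =
    { existingOptions with
        embedAllSource = false                    // don't embed every document
        embedSourceList = [ "Script.fsx" ] }      // embed only this file (paths matched case-insensitively)

WriteILBinary ("app.exe", optsWithSource, ilModule, false)
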
2 changes: 2 additions & 0 deletions src/absil/ilwrite.fsi
@@ -20,6 +20,8 @@ type options =
pdbfile: string option
portablePDB: bool
embeddedPDB: bool
embedAllSource: bool
embedSourceList: string list
signer : ILStrongNameSigner option
fixupOverlappingSequencePoints : bool
emitTailcalls: bool
94 changes: 82 additions & 12 deletions src/absil/ilwritepdb.fs
@@ -19,6 +19,31 @@ open Microsoft.FSharp.Compiler.AbstractIL.Internal.Library
open Microsoft.FSharp.Compiler.ErrorLogger
open Microsoft.FSharp.Compiler.Range


type BlobBuildingStream () =
inherit Stream()

static let chunkSize = 32 * 1024
let builder = new BlobBuilder(chunkSize)

override this.CanWrite = true
override this.CanRead = false
override this.CanSeek = false
override this.Length = int64(builder.Count)

override this.Write(buffer:byte array, offset:int, count:int) = builder.WriteBytes(buffer, offset, count)
override this.WriteByte(value:byte) = builder.WriteByte(value)
member this.WriteInt32(value:int) = builder.WriteInt32(value)
member this.ToImmutableArray() = builder.ToImmutableArray()
member this.TryWriteBytes(stream:Stream, length:int) = builder.TryWriteBytes(stream, length)

override this.Flush() = ()
override this.Dispose(_disposing:bool) = ()
override this.Seek(_offset:int64, _origin:SeekOrigin) = raise (new NotSupportedException())
override this.Read(_buffer:byte array, _offset:int, _count:int) = raise (new NotSupportedException())
override this.SetLength(_value:int64) = raise (new NotSupportedException())
override val Position = 0L with get, set
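
For illustration, a minimal sketch of how this adapter is typically used (not part of the diff): it lets DeflateStream, which can only write to a System.IO.Stream, deposit its output directly into the underlying BlobBuilder with no intermediate byte array. The helper name is hypothetical, and the file's existing opens (System.IO, System.IO.Compression, System.Reflection.Metadata) are assumed to be in scope.

// Hypothetical helper: deflate a file straight into an immutable blob,
// prefixed with its uncompressed length (as the compressed branch of includeSource below does).
let compressFileToBlob (path: string) =
    use source = File.OpenRead(path)
    let blobStream = new BlobBuildingStream()
    blobStream.WriteInt32(int source.Length)
    do
        // dispose the DeflateStream before reading the builder so its buffers are flushed
        use deflater = new DeflateStream(blobStream, CompressionMode.Compress, true)
        source.CopyTo(deflater)
    blobStream.ToImmutableArray()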

// --------------------------------------------------------------------
// PDB types
// --------------------------------------------------------------------
@@ -227,7 +252,7 @@ let fixupOverlappingSequencePoints fixupSPs showTimes methods =
Array.sortInPlaceBy fst allSps
spCounts, allSps

let generatePortablePdb fixupSPs showTimes (info:PdbData) =
let generatePortablePdb fixupSPs (embedAllSource:bool) (embedSourceList:string list) showTimes (info:PdbData) =
sortMethods showTimes info
let _spCounts, _allSps = fixupOverlappingSequencePoints fixupSPs showTimes info.Methods
let externalRowCounts = getRowCounts info.TableRowCounts
@@ -253,23 +278,69 @@ let generatePortablePdb fixupSPs showTimes (info:PdbData) =
metadata.GetOrAddBlob(writer)

let corSymLanguageTypeFSharp = System.Guid(0xAB4F38C9u, 0xB6E6us, 0x43baus, 0xBEuy, 0x3Buy, 0x58uy, 0x08uy, 0x0Buy, 0x2Cuy, 0xCCuy, 0xE3uy)
let embeddedSource = System.Guid(0x0e8a571bu, 0x6926us, 0x466eus, 0xb4uy, 0xaduy, 0x8auy, 0xb0uy, 0x46uy, 0x11uy, 0xf5uy, 0xfeuy)

/// <summary>
/// The maximum number of bytes to write out uncompressed.
///
/// This prevents wasting resources on compressing tiny files with little to negative gain
/// in PDB file size.
///
/// Chosen as the point at which we start to see > 10% blob size reduction using all
/// current source files in corefx and roslyn as sample data.
/// </summary>
let sourceCompressionThreshold = 200

let documentIndex =
let includeSource file =
let isInList =
if embedSourceList.Length = 0 then false
else
embedSourceList |> List.tryFind(fun f -> String.Compare(file, f, StringComparison.OrdinalIgnoreCase ) = 0) |> Option.isSome

if not embedAllSource && not isInList || not (File.Exists(file)) then
None
else
let stream = File.OpenRead(file)
let length64 = stream.Length
if length64 > int64(Int32.MaxValue) then raise (new IOException("File is too long"))

let builder = new BlobBuildingStream()
let length = int(length64)
if length < sourceCompressionThreshold then
builder.WriteInt32(0)
builder.TryWriteBytes(stream, length) |> ignore
else
builder.WriteInt32(length) |>ignore
use deflater = new DeflateStream(builder, CompressionMode.Compress, true)
stream.CopyTo(deflater) |> ignore
Some (builder.ToImmutableArray())

let mutable index = new Dictionary<string, DocumentHandle>(docs.Length)
metadata.SetCapacity(TableIndex.Document, docs.Length)
for doc in docs do
let handle =
match checkSum doc.File with
| Some (hashAlg, checkSum) ->
serializeDocumentName doc.File,
metadata.GetOrAddGuid(hashAlg),
metadata.GetOrAddBlob(checkSum.ToImmutableArray()),
metadata.GetOrAddGuid(corSymLanguageTypeFSharp)
let dbgInfo =
(serializeDocumentName doc.File,
metadata.GetOrAddGuid(hashAlg),
metadata.GetOrAddBlob(checkSum.ToImmutableArray()),
metadata.GetOrAddGuid(corSymLanguageTypeFSharp)) |> metadata.AddDocument
match includeSource doc.File with
| None -> ()
| Some blob ->
metadata.AddCustomDebugInformation(DocumentHandle.op_Implicit(dbgInfo),
metadata.GetOrAddGuid(embeddedSource),
metadata.GetOrAddBlob(blob)) |> ignore
dbgInfo
| None ->
serializeDocumentName doc.File,
metadata.GetOrAddGuid(System.Guid.Empty),
metadata.GetOrAddBlob(ImmutableArray<byte>.Empty),
metadata.GetOrAddGuid(corSymLanguageTypeFSharp)
|> metadata.AddDocument
let dbgInfo =
(serializeDocumentName doc.File,
metadata.GetOrAddGuid(System.Guid.Empty),
metadata.GetOrAddBlob(ImmutableArray<byte>.Empty),
metadata.GetOrAddGuid(corSymLanguageTypeFSharp)) |> metadata.AddDocument
dbgInfo
index.Add(doc.File, handle)
index

@@ -291,7 +362,7 @@ let generatePortablePdb fixupSPs showTimes (info:PdbData) =
else
match documentIndex.TryGetValue(docs.[d].File) with
| false, _ -> Unchecked.defaultof<DocumentHandle>
| true, f -> f
| true, h -> h

if sps.Length = 0 then
Unchecked.defaultof<DocumentHandle>, Unchecked.defaultof<BlobHandle>
@@ -306,7 +377,6 @@ let generatePortablePdb fixupSPs showTimes (info:PdbData) =
singleDocumentIndex

let builder = new BlobBuilder()

builder.WriteCompressedInteger(minfo.LocalSignatureToken)

// Initial document: When sp's spread over more than one document we put the initial document here.
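
For reference, the embedded-source blob produced by includeSource above follows the layout Roslyn uses for the embedded-source custom debug information: a leading Int32 of 0 means the rest of the blob is the raw source bytes, while a non-zero value is the uncompressed length followed by Deflate-compressed bytes. A sketch of a reader under that assumption (the helper name is hypothetical):

// Hypothetical: decode an embedded-source blob back into raw source bytes.
open System.IO
open System.IO.Compression

let readEmbeddedSource (blob: byte[]) : byte[] =
    use ms = new MemoryStream(blob)
    use reader = new BinaryReader(ms)
    match reader.ReadInt32() with
    | 0 ->
        // Uncompressed: everything after the 4-byte header is the source text.
        reader.ReadBytes(blob.Length - 4)
    | uncompressedLength ->
        // Compressed: inflate the remainder back to the stated length.
        use deflate = new DeflateStream(ms, CompressionMode.Decompress)
        use out = new MemoryStream(uncompressedLength)
        deflate.CopyTo(out)
        out.ToArray()
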
2 changes: 1 addition & 1 deletion src/absil/ilwritepdb.fsi
@@ -82,7 +82,7 @@ type idd =
iddData: byte[];
iddChunk: BinaryChunk }

val generatePortablePdb : fixupSPs:bool -> showTimes:bool -> info:PdbData -> (int64 * BlobContentId * MemoryStream)
val generatePortablePdb : fixupSPs:bool -> embedAllSource:bool -> embedSourceList:string list -> showTimes:bool -> info:PdbData -> (int64 * BlobContentId * MemoryStream)
val compressPortablePdbStream : uncompressedLength:int64 -> contentId:BlobContentId -> stream:MemoryStream -> (int64 * BlobContentId * MemoryStream)
val embedPortablePdbInfo : uncompressedLength:int64 -> contentId:BlobContentId -> stream:MemoryStream -> showTimes:bool -> fpdb:string -> cvChunk:BinaryChunk -> pdbChunk:BinaryChunk -> idd[]
val writePortablePdbInfo : contentId:BlobContentId -> stream:MemoryStream -> showTimes:bool -> fpdb:string -> cvChunk:BinaryChunk -> idd[]