[Pkg-cli-apps-commits] [fsharp] 10/17: Imported Upstream version 4.0.0.4+dfsg2
Jo Shields
directhex at moszumanska.debian.org
Wed Dec 2 14:59:34 UTC 2015
This is an automated email from the git hooks/post-receive script.
directhex pushed a commit to annotated tag debian/4.0.0.4+dfsg2-1
in repository fsharp.
commit d257718be1930f2470888ee7b9acaa2b0f1c025b
Author: Jo Shields <jo.shields at xamarin.com>
Date: Tue Dec 1 16:25:31 2015 +0000
Imported Upstream version 4.0.0.4+dfsg2
---
lib/bootstrap/src/Common/Arg.fs | 133 ++++++
lib/bootstrap/src/Common/Arg.fsi | 50 ++
.../src/FsLexYacc.Runtime/AssemblyInfo.fs | 12 +
.../src/FsLexYacc.Runtime/FsLexYacc.Runtime.fsproj | 66 +++
lib/bootstrap/src/FsLexYacc.Runtime/Lexing.fs | 427 +++++++++++++++++
lib/bootstrap/src/FsLexYacc.Runtime/Lexing.fsi | 151 ++++++
lib/bootstrap/src/FsLexYacc.Runtime/Parsing.fs | 512 +++++++++++++++++++++
lib/bootstrap/src/FsLexYacc.Runtime/Parsing.fsi | 130 ++++++
8 files changed, 1481 insertions(+)
diff --git a/lib/bootstrap/src/Common/Arg.fs b/lib/bootstrap/src/Common/Arg.fs
new file mode 100644
index 0000000..a1f63bd
--- /dev/null
+++ b/lib/bootstrap/src/Common/Arg.fs
@@ -0,0 +1,133 @@
+// (c) Microsoft Corporation 2005-2009.
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+namespace Internal.Utilities
+#else
+namespace Microsoft.FSharp.Text
+#endif
+
+
+type ArgType =
+ | ClearArg of bool ref
+ | FloatArg of (float -> unit)
+ | IntArg of (int -> unit)
+ | RestArg of (string -> unit)
+ | SetArg of bool ref
+ | StringArg of (string -> unit)
+ | UnitArg of (unit -> unit)
+ static member Clear r = ClearArg r
+ static member Float r = FloatArg r
+ static member Int r = IntArg r
+ static member Rest r = RestArg r
+ static member Set r = SetArg r
+ static member String r = StringArg r
+ static member Unit r = UnitArg r
+
+
+type ArgInfo (name,action,help) =
+ member x.Name = name
+ member x.ArgType = action
+ member x.HelpText = help
+
+exception Bad of string
+exception HelpText of string
+
+[<Sealed>]
+type ArgParser() =
+ static let getUsage specs u =
+ let sbuf = new System.Text.StringBuilder 100
+ let pstring (s:string) = sbuf.Append s |> ignore
+ let pendline s = pstring s; pstring "\n"
+ pendline u;
+ List.iter (fun (arg:ArgInfo) ->
+ match arg.Name, arg.ArgType, arg.HelpText with
+ | (s, (UnitArg _ | SetArg _ | ClearArg _), helpText) -> pstring "\t"; pstring s; pstring ": "; pendline helpText
+ | (s, StringArg _, helpText) -> pstring "\t"; pstring s; pstring " <string>: "; pendline helpText
+ | (s, IntArg _, helpText) -> pstring "\t"; pstring s; pstring " <int>: "; pendline helpText
+ | (s, FloatArg _, helpText) -> pstring "\t"; pstring s; pstring " <float>: "; pendline helpText
+ | (s, RestArg _, helpText) -> pstring "\t"; pstring s; pstring " ...: "; pendline helpText)
+ specs;
+ pstring "\t"; pstring "--help"; pstring ": "; pendline "display this list of options";
+ pstring "\t"; pstring "-help"; pstring ": "; pendline "display this list of options";
+ sbuf.ToString()
+
+
+ static member ParsePartial(cursor,argv,argSpecs:seq<ArgInfo>,?other,?usageText) =
+ let other = defaultArg other (fun _ -> ())
+ let usageText = defaultArg usageText ""
+ let nargs = Array.length argv
+ incr cursor;
+ let argSpecs = argSpecs |> Seq.toList
+ let specs = argSpecs |> List.map (fun (arg:ArgInfo) -> arg.Name, arg.ArgType)
+ while !cursor < nargs do
+ let arg = argv.[!cursor]
+ let rec findMatchingArg args =
+ match args with
+ | ((s, action) :: _) when s = arg ->
+ let getSecondArg () =
+ if !cursor + 1 >= nargs then
+ raise(Bad("option "+s+" needs an argument.\n"+getUsage argSpecs usageText));
+ argv.[!cursor+1]
+
+ match action with
+ | UnitArg f ->
+ f ();
+ incr cursor
+ | SetArg f ->
+ f := true;
+ incr cursor
+ | ClearArg f ->
+ f := false;
+ incr cursor
+ | StringArg f->
+ let arg2 = getSecondArg()
+ f arg2;
+ cursor := !cursor + 2
+ | IntArg f ->
+ let arg2 = getSecondArg ()
+ let arg2 = try int32 arg2 with _ -> raise(Bad(getUsage argSpecs usageText)) in
+ f arg2;
+ cursor := !cursor + 2;
+ | FloatArg f ->
+ let arg2 = getSecondArg()
+ let arg2 = try float arg2 with _ -> raise(Bad(getUsage argSpecs usageText)) in
+ f arg2;
+ cursor := !cursor + 2;
+ | RestArg f ->
+ incr cursor;
+ while !cursor < nargs do
+ f (argv.[!cursor]);
+ incr cursor;
+
+ | (_ :: more) -> findMatchingArg more
+ | [] ->
+                if arg = "-help" || arg = "--help" || arg = "/help" || arg = "/?" then
+ raise (HelpText (getUsage argSpecs usageText))
+                // Note: '/abc/def' does not count as an argument
+ // Note: '/abc' does
+ elif arg.Length>0 && (arg.[0] = '-' || (arg.[0] = '/' && not (arg.Length > 1 && arg.[1..].Contains ("/")))) then
+ raise (Bad ("unrecognized argument: "+ arg + "\n" + getUsage argSpecs usageText))
+ else
+ other arg;
+ incr cursor
+ findMatchingArg specs
+
+ static member Usage (specs,?usage) =
+ let usage = defaultArg usage ""
+ System.Console.Error.WriteLine (getUsage (Seq.toList specs) usage)
+
+ #if FX_NO_COMMAND_LINE_ARGS
+ #else
+ static member Parse (specs,?other,?usageText) =
+ let current = ref 0
+ let argv = System.Environment.GetCommandLineArgs()
+ try ArgParser.ParsePartial (current, argv, specs, ?other=other, ?usageText=usageText)
+ with
+ | Bad h
+ | HelpText h ->
+ System.Console.Error.WriteLine h;
+ System.Console.Error.Flush();
+ System.Environment.Exit(1);
+ | e ->
+ reraise()
+ #endif
diff --git a/lib/bootstrap/src/Common/Arg.fsi b/lib/bootstrap/src/Common/Arg.fsi
new file mode 100644
index 0000000..367f69f
--- /dev/null
+++ b/lib/bootstrap/src/Common/Arg.fsi
@@ -0,0 +1,50 @@
+// (c) Microsoft Corporation 2005-2009.
+
+/// A simple command-line argument processor.
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+namespace Internal.Utilities
+#else
+namespace Microsoft.FSharp.Text
+#endif
+
+/// The spec value describes the action of the argument,
+/// and whether it expects a following parameter.
+[<Sealed>]
+type ArgType =
+ static member Clear : bool ref -> ArgType
+ static member Float : (float -> unit) -> ArgType
+ static member Int : (int -> unit) -> ArgType
+ static member Rest : (string -> unit) -> ArgType
+ static member Set : bool ref -> ArgType
+ static member String : (string -> unit) -> ArgType
+ static member Unit : (unit -> unit) -> ArgType
+
+type ArgInfo =
+ new : name:string * action:ArgType * help:string -> ArgInfo
+ /// Return the name of the argument
+ member Name : string
+ /// Return the argument type and action of the argument
+ member ArgType : ArgType
+ /// Return the usage help associated with the argument
+ member HelpText : string
+
+[<Sealed>]
+type ArgParser =
+ #if FX_NO_COMMAND_LINE_ARGS
+ #else
+
+ /// Parse some of the arguments given by 'argv', starting at the given position
+ [<System.Obsolete("This method should not be used directly as it will be removed in a future revision of this library")>]
+ static member ParsePartial: cursor: int ref * argv: string[] * arguments:seq<ArgInfo> * ?otherArgs: (string -> unit) * ?usageText:string -> unit
+
+    /// Parse the arguments given by System.Environment.GetCommandLineArgs()
+ /// according to the argument processing specifications "specs".
+    /// Args begin with "-". Non-arguments are passed to "otherArgs" in
+    /// order. "usageText" is printed as part of the usage line if an error occurs.
+
+ static member Parse: arguments:seq<ArgInfo> * ?otherArgs: (string -> unit) * ?usageText:string -> unit
+ #endif
+
+ /// Prints the help for each argument.
+ static member Usage : arguments:seq<ArgInfo> * ?usage:string -> unit
+
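For reference, the Arg API above is typically wired up as in the minimal sketch
below; the option names, handlers and usage string are illustrative only and
are not part of this commit:

    open Microsoft.FSharp.Text

    let verbose = ref false
    let inputs = System.Collections.Generic.List<string>()

    let specs =
        [ ArgInfo("-v", ArgType.Set verbose, "enable verbose output")
          ArgInfo("-o", ArgType.String (fun s -> printfn "output file: %s" s), "set the output file") ]

    // Non-option arguments are handed to the 'otherArgs' callback in order.
    ArgParser.Parse(specs, (fun s -> inputs.Add s), "usage: tool <options> <files>")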
diff --git a/lib/bootstrap/src/FsLexYacc.Runtime/AssemblyInfo.fs b/lib/bootstrap/src/FsLexYacc.Runtime/AssemblyInfo.fs
new file mode 100644
index 0000000..b78f260
--- /dev/null
+++ b/lib/bootstrap/src/FsLexYacc.Runtime/AssemblyInfo.fs
@@ -0,0 +1,12 @@
+namespace System
+open System.Reflection
+
+[<assembly: AssemblyTitleAttribute("FsLexYacc.Runtime")>]
+[<assembly: AssemblyProductAttribute("FsLexYacc.Runtime")>]
+[<assembly: AssemblyDescriptionAttribute("FsLex/FsYacc lexer/parser generation tools")>]
+[<assembly: AssemblyVersionAttribute("6.1.0")>]
+[<assembly: AssemblyFileVersionAttribute("6.1.0")>]
+do ()
+
+module internal AssemblyVersionInformation =
+ let [<Literal>] Version = "6.1.0"
diff --git a/lib/bootstrap/src/FsLexYacc.Runtime/FsLexYacc.Runtime.fsproj b/lib/bootstrap/src/FsLexYacc.Runtime/FsLexYacc.Runtime.fsproj
new file mode 100644
index 0000000..87c353e
--- /dev/null
+++ b/lib/bootstrap/src/FsLexYacc.Runtime/FsLexYacc.Runtime.fsproj
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <PropertyGroup>
+ <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+ <Platform>AnyCPU</Platform>
+ <ProjectGuid>{7e90d6ce-a10b-4858-a5bc-41df7250cbcc}</ProjectGuid>
+ <TreatWarningsAsErrors>
+ </TreatWarningsAsErrors>
+ <!-- 5310 tracks reenabling -->
+ <OutputType>Library</OutputType>
+ <AssemblyName>FsLexYacc.Runtime</AssemblyName>
+ <AllowCrossTargeting>true</AllowCrossTargeting>
+ <TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
+ <TargetFrameworkProfile>Client</TargetFrameworkProfile>
+ <TargetFSharpCoreVersion>4.3.0.0</TargetFSharpCoreVersion>
+ <WarningLevel>3</WarningLevel>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
+ <DebugSymbols>true</DebugSymbols>
+ <DebugType>full</DebugType>
+ <Optimize>false</Optimize>
+ <OutputPath>..\..\bin\Debug\</OutputPath>
+ <DocumentationFile>..\..\bin\Debug\FsLexYacc.Runtime.xml</DocumentationFile>
+ <Tailcalls>false</Tailcalls>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)' == 'Release' ">
+ <DebugSymbols>true</DebugSymbols>
+ <DebugType>pdbonly</DebugType>
+ <Optimize>true</Optimize>
+ <OutputPath>..\..\bin\</OutputPath>
+ <DocumentationFile>..\..\bin\FsLexYacc.Runtime.xml</DocumentationFile>
+ <Tailcalls>true</Tailcalls>
+ </PropertyGroup>
+ <ItemGroup>
+ <Compile Include="AssemblyInfo.fs">
+ <Link>AssemblyInfo.fs</Link>
+ </Compile>
+ <Compile Include="..\FsLexYacc.Runtime\Lexing.fsi">
+ <Link>lexing.fsi</Link>
+ </Compile>
+ <Compile Include="..\FsLexYacc.Runtime\Lexing.fs">
+ <Link>lexing.fs</Link>
+ </Compile>
+ <Compile Include="..\FsLexYacc.Runtime\Parsing.fsi">
+ <Link>parsing.fsi</Link>
+ </Compile>
+ <Compile Include="..\FsLexYacc.Runtime\Parsing.fs">
+ <Link>parsing.fs</Link>
+ </Compile>
+ </ItemGroup>
+ <ItemGroup>
+ <Reference Include="mscorlib" />
+ <Reference Include="System" />
+ <Reference Include="System.Core" />
+ <Reference Include="FSharp.Core, Version=$(TargetFSharpCoreVersion), Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a">
+ <Private>False</Private>
+ </Reference>
+ </ItemGroup>
+ <PropertyGroup Condition="Exists('$(MSBuildExtensionsPath32)\..\Microsoft SDKs\F#\3.0\Framework\v4.0\Microsoft.FSharp.Targets')">
+ <FSharpTargetsPath>$(MSBuildExtensionsPath32)\..\Microsoft SDKs\F#\3.0\Framework\v4.0\Microsoft.FSharp.Targets</FSharpTargetsPath>
+ </PropertyGroup>
+ <PropertyGroup Condition="Exists('$(MSBuildExtensionsPath32)\..\Microsoft SDKs\F#\3.1\Framework\v4.0\Microsoft.FSharp.Targets')">
+ <FSharpTargetsPath>$(MSBuildExtensionsPath32)\..\Microsoft SDKs\F#\3.1\Framework\v4.0\Microsoft.FSharp.Targets</FSharpTargetsPath>
+ </PropertyGroup>
+ <Import Project="$(FSharpTargetsPath)" Condition="Exists('$(FSharpTargetsPath)')" />
+</Project>
\ No newline at end of file
diff --git a/lib/bootstrap/src/FsLexYacc.Runtime/Lexing.fs b/lib/bootstrap/src/FsLexYacc.Runtime/Lexing.fs
new file mode 100644
index 0000000..c9050c5
--- /dev/null
+++ b/lib/bootstrap/src/FsLexYacc.Runtime/Lexing.fs
@@ -0,0 +1,427 @@
+// (c) Microsoft Corporation 2005-2009.
+
+#nowarn "47" // recursive initialization of LexBuffer
+
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+namespace Internal.Utilities.Text.Lexing
+
+#else
+namespace Microsoft.FSharp.Text.Lexing
+#endif
+
+ open System.Collections.Generic
+
+ // REVIEW: This type showed up on a parsing-intensive performance measurement. Consider whether it can be a struct-record later when we have this feature. -jomo
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+ type internal Position =
+#else
+ type Position =
+#endif
+ { pos_fname : string;
+ pos_lnum : int;
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+ pos_orig_lnum : int;
+#endif
+ pos_bol : int;
+ pos_cnum : int; }
+ member x.FileName = x.pos_fname
+ member x.Line = x.pos_lnum
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+ member x.OriginalLine = x.pos_orig_lnum
+#endif
+ member x.Char = x.pos_cnum
+ member x.AbsoluteOffset = x.pos_cnum
+ member x.StartOfLine = x.pos_bol
+ member x.StartOfLineAbsoluteOffset = x.pos_bol
+ member x.Column = x.pos_cnum - x.pos_bol
+ member pos.NextLine =
+ { pos with
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+ pos_orig_lnum = pos.OriginalLine + 1;
+#endif
+ pos_lnum = pos.Line+1;
+ pos_bol = pos.AbsoluteOffset }
+ member pos.EndOfToken(n) = {pos with pos_cnum=pos.pos_cnum + n }
+ member pos.AsNewLinePos() = pos.NextLine
+ member pos.ShiftColumnBy(by) = {pos with pos_cnum = pos.pos_cnum + by}
+ static member Empty =
+ { pos_fname="";
+ pos_lnum= 0;
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+ pos_orig_lnum = 0;
+#endif
+ pos_bol= 0;
+ pos_cnum=0 }
+ static member FirstLine(filename) =
+ { pos_fname=filename;
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+ pos_orig_lnum = 1;
+#endif
+ pos_lnum= 1;
+ pos_bol= 0;
+ pos_cnum=0 }
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+ type internal LexBufferFiller<'char> =
+#else
+ type LexBufferFiller<'char> =
+#endif
+ { fillSync : (LexBuffer<'char> -> unit) option
+ fillAsync : (LexBuffer<'char> -> Async<unit>) option }
+
+ and [<Sealed>]
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+ internal LexBuffer<'char>(filler: LexBufferFiller<'char>) as this =
+#else
+ LexBuffer<'char>(filler: LexBufferFiller<'char>) as this =
+#endif
+ let context = new Dictionary<string,obj>(1) in
+ let extendBufferSync = (fun () -> match filler.fillSync with Some refill -> refill this | None -> invalidOp "attempt to read synchronously from an asynchronous lex buffer")
+ let extendBufferAsync = (fun () -> match filler.fillAsync with Some refill -> refill this | None -> invalidOp "attempt to read asynchronously from a synchronous lex buffer")
+ let mutable buffer=[||];
+        /// number of valid characters beyond bufferScanStart
+ let mutable bufferMaxScanLength=0;
+ /// count into the buffer when scanning
+ let mutable bufferScanStart=0;
+ /// number of characters scanned so far
+ let mutable bufferScanLength=0;
+ /// length of the scan at the last accepting state
+ let mutable lexemeLength=0;
+ /// action related to the last accepting state
+ let mutable bufferAcceptAction=0;
+ let mutable eof = false;
+ let mutable startPos = Position.Empty ;
+ let mutable endPos = Position.Empty
+
+ // Throw away all the input besides the lexeme
+
+ let discardInput () =
+ let keep = Array.sub buffer bufferScanStart bufferScanLength
+ let nkeep = keep.Length
+ Array.blit keep 0 buffer 0 nkeep;
+ bufferScanStart <- 0;
+ bufferMaxScanLength <- nkeep
+
+
+ member lexbuf.EndOfScan () : int =
+ // Printf.eprintf "endOfScan, lexBuffer.lexemeLength = %d\n" lexBuffer.lexemeLength;
+ if bufferAcceptAction < 0 then
+ failwith "unrecognized input"
+
+ // Printf.printf "endOfScan %d state %d on unconsumed input '%c' (%d)\n" a s (Char.chr inp) inp;
+ // Printf.eprintf "accept, lexeme = %s\n" (lexeme lexBuffer);
+ lexbuf.StartPos <- endPos;
+ lexbuf.EndPos <- endPos.EndOfToken(lexbuf.LexemeLength);
+ bufferAcceptAction
+
+ member lexbuf.StartPos
+ with get() = startPos
+ and set(b) = startPos <- b
+
+ member lexbuf.EndPos
+ with get() = endPos
+ and set(b) = endPos <- b
+
+ member lexbuf.Lexeme = Array.sub buffer bufferScanStart lexemeLength
+ member lexbuf.LexemeChar(n) = buffer.[n+bufferScanStart]
+
+ member lexbuf.BufferLocalStore = (context :> IDictionary<_,_>)
+ member lexbuf.LexemeLength with get() : int = lexemeLength and set v = lexemeLength <- v
+ member internal lexbuf.Buffer with get() : 'char[] = buffer and set v = buffer <- v
+ member internal lexbuf.BufferMaxScanLength with get() = bufferMaxScanLength and set v = bufferMaxScanLength <- v
+ member internal lexbuf.BufferScanLength with get() = bufferScanLength and set v = bufferScanLength <- v
+ member internal lexbuf.BufferScanStart with get() : int = bufferScanStart and set v = bufferScanStart <- v
+ member internal lexbuf.BufferAcceptAction with get() = bufferAcceptAction and set v = bufferAcceptAction <- v
+ member internal lexbuf.RefillBuffer = extendBufferSync
+ member internal lexbuf.AsyncRefillBuffer = extendBufferAsync
+
+ static member LexemeString(lexbuf:LexBuffer<char>) =
+ new System.String(lexbuf.Buffer,lexbuf.BufferScanStart,lexbuf.LexemeLength)
+
+ member lexbuf.IsPastEndOfStream
+ with get() = eof
+ and set(b) = eof <- b
+
+ member lexbuf.DiscardInput() = discardInput ()
+
+ member x.BufferScanPos = bufferScanStart + bufferScanLength
+
+ member lexbuf.EnsureBufferSize n =
+ if lexbuf.BufferScanPos + n >= buffer.Length then
+ let repl = Array.zeroCreate (lexbuf.BufferScanPos + n)
+ Array.blit buffer bufferScanStart repl bufferScanStart bufferScanLength;
+ buffer <- repl
+
+ static member FromReadFunctions (syncRead : ('char[] * int * int -> int) option, asyncRead : ('char[] * int * int -> Async<int>) option) : LexBuffer<'char> =
+ let extension= Array.zeroCreate 4096
+ let fillers =
+ { fillSync =
+ match syncRead with
+ | None -> None
+ | Some read ->
+ Some (fun lexBuffer ->
+ let n = read(extension,0,extension.Length)
+ lexBuffer.EnsureBufferSize n;
+ Array.blit extension 0 lexBuffer.Buffer lexBuffer.BufferScanPos n;
+ lexBuffer.BufferMaxScanLength <- lexBuffer.BufferScanLength + n);
+ fillAsync =
+ match asyncRead with
+ | None -> None
+ | Some read ->
+ Some (fun lexBuffer ->
+ async {
+ let! n = read(extension,0,extension.Length)
+ lexBuffer.EnsureBufferSize n;
+ Array.blit extension 0 lexBuffer.Buffer lexBuffer.BufferScanPos n;
+ lexBuffer.BufferMaxScanLength <- lexBuffer.BufferScanLength + n }) }
+ new LexBuffer<_>(fillers)
+
+ // A full type signature is required on this method because it is used at more specific types within its own scope
+ static member FromFunction (f : 'char[] * int * int -> int) : LexBuffer<'char> = LexBuffer<_>.FromReadFunctions(Some(f),None)
+ static member FromAsyncFunction (f : 'char[] * int * int -> Async<int>) : LexBuffer<'char> = LexBuffer<_>.FromReadFunctions(None,Some(f))
+
+ static member FromCharFunction f : LexBuffer<char> =
+ LexBuffer<char>.FromFunction(fun (buff,start,len) ->
+ let buff2 = Array.zeroCreate len
+ let n = f buff2 len
+ Array.blit buff2 0 buff start len
+ n)
+ static member FromByteFunction f : LexBuffer<byte> =
+ LexBuffer<byte>.FromFunction(fun (buff,start,len) ->
+ let buff2 = Array.zeroCreate len
+ let n = f buff2 len
+ Array.blit buff2 0 buff start len
+ n)
+
+ // A full type signature is required on this method because it is used at more specific types within its own scope
+ static member FromArray (s: 'char[]) : LexBuffer<'char> =
+ let lexBuffer =
+ new LexBuffer<_>
+ { fillSync = Some (fun _ -> ());
+ fillAsync = Some (fun _ -> async { return () }) }
+ let buffer = Array.copy s
+ lexBuffer.Buffer <- buffer;
+ lexBuffer.BufferMaxScanLength <- buffer.Length;
+ lexBuffer
+
+ static member FromBytes (arr) = LexBuffer<byte>.FromArray(arr)
+ static member FromChars (arr) = LexBuffer<char>.FromArray(arr)
+ static member FromString (s:string) = LexBuffer<char>.FromChars (s.ToCharArray())
+
+ static member FromTextReader (tr:System.IO.TextReader) : LexBuffer<char> =
+ LexBuffer<char>.FromFunction(tr.Read)
+
+ static member FromBinaryReader (br:System.IO.BinaryReader) : LexBuffer<byte> =
+ LexBuffer<byte>.FromFunction(br.Read)
+
+ static member FromStream (stream:System.IO.Stream) : LexBuffer<byte> =
+ LexBuffer<byte>.FromReadFunctions(Some(stream.Read),Some(fun (buf,offset,len) -> stream.AsyncRead(buf,offset=offset,count=len)))
+
+ module GenericImplFragments =
+ let startInterpret(lexBuffer:LexBuffer<_>)=
+ lexBuffer.BufferScanStart <- lexBuffer.BufferScanStart + lexBuffer.LexemeLength;
+ lexBuffer.BufferMaxScanLength <- lexBuffer.BufferMaxScanLength - lexBuffer.LexemeLength;
+ lexBuffer.BufferScanLength <- 0;
+ lexBuffer.LexemeLength <- 0;
+ lexBuffer.BufferAcceptAction <- -1;
+
+ let afterRefill (trans: uint16[] array,sentinel,lexBuffer:LexBuffer<_>,scanUntilSentinel,endOfScan,state,eofPos) =
+ // end of file occurs if we couldn't extend the buffer
+ if lexBuffer.BufferScanLength = lexBuffer.BufferMaxScanLength then
+ let snew = int trans.[state].[eofPos] // == EOF
+ if snew = sentinel then
+ endOfScan()
+ else
+ if lexBuffer.IsPastEndOfStream then failwith "End of file on lexing stream";
+ lexBuffer.IsPastEndOfStream <- true;
+ // Printf.printf "state %d --> %d on eof\n" state snew;
+ scanUntilSentinel(lexBuffer,snew)
+ else
+ scanUntilSentinel(lexBuffer, state)
+
+ let onAccept (lexBuffer:LexBuffer<_>,a) =
+ lexBuffer.LexemeLength <- lexBuffer.BufferScanLength;
+ lexBuffer.BufferAcceptAction <- a;
+
+ open GenericImplFragments
+
+ [<Sealed>]
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+ type internal AsciiTables(trans: uint16[] array, accept: uint16[]) =
+#else
+ type AsciiTables(trans: uint16[] array, accept: uint16[]) =
+#endif
+ let rec scanUntilSentinel(lexBuffer, state) =
+ let sentinel = 255 * 256 + 255
+ // Return an endOfScan after consuming the input
+ let a = int accept.[state]
+ if a <> sentinel then
+ onAccept (lexBuffer,a)
+
+ if lexBuffer.BufferScanLength = lexBuffer.BufferMaxScanLength then
+ lexBuffer.DiscardInput();
+ lexBuffer.RefillBuffer ();
+ // end of file occurs if we couldn't extend the buffer
+ afterRefill (trans,sentinel,lexBuffer,scanUntilSentinel,lexBuffer.EndOfScan,state,256 (* == EOF *) )
+ else
+ // read a character - end the scan if there are no further transitions
+ let inp = int(lexBuffer.Buffer.[lexBuffer.BufferScanPos])
+ let snew = int trans.[state].[inp]
+ if snew = sentinel then
+ lexBuffer.EndOfScan()
+ else
+ lexBuffer.BufferScanLength <- lexBuffer.BufferScanLength + 1;
+ // Printf.printf "state %d --> %d on '%c' (%d)\n" state snew (Char.chr inp) inp;
+ scanUntilSentinel(lexBuffer, snew)
+
+ /// Interpret tables for an ascii lexer generated by fslex.
+ member tables.Interpret(initialState,lexBuffer : LexBuffer<byte>) =
+ startInterpret(lexBuffer)
+ scanUntilSentinel(lexBuffer, initialState)
+
+ /// Interpret tables for an ascii lexer generated by fslex.
+ member tables.AsyncInterpret(initialState,lexBuffer : LexBuffer<byte>) =
+
+ let rec scanUntilSentinel(lexBuffer,state) : Async<int> =
+ async {
+ let sentinel = 255 * 256 + 255
+ // Return an endOfScan after consuming the input
+ let a = int accept.[state]
+ if a <> sentinel then
+ onAccept (lexBuffer,a)
+
+ if lexBuffer.BufferScanLength = lexBuffer.BufferMaxScanLength then
+ lexBuffer.DiscardInput();
+ do! lexBuffer.AsyncRefillBuffer ();
+ // end of file occurs if we couldn't extend the buffer
+ return! afterRefill (trans,sentinel,lexBuffer,scanUntilSentinel,endOfScan,state,256 (* == EOF *) )
+ else
+ // read a character - end the scan if there are no further transitions
+ let inp = int(lexBuffer.Buffer.[lexBuffer.BufferScanPos])
+ let snew = int trans.[state].[inp]
+ if snew = sentinel then
+ return! endOfScan()
+ else
+ lexBuffer.BufferScanLength <- lexBuffer.BufferScanLength + 1;
+ return! scanUntilSentinel(lexBuffer,snew)
+ }
+ and endOfScan() =
+ async { return lexBuffer.EndOfScan() }
+ startInterpret(lexBuffer)
+ scanUntilSentinel(lexBuffer, initialState)
+
+
+ static member Create(trans,accept) = new AsciiTables(trans,accept)
+
+ [<Sealed>]
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+ type internal UnicodeTables(trans: uint16[] array, accept: uint16[]) =
+#else
+ type UnicodeTables(trans: uint16[] array, accept: uint16[]) =
+#endif
+ let sentinel = 255 * 256 + 255
+ let numUnicodeCategories = 30
+ let numLowUnicodeChars = 128
+ let numSpecificUnicodeChars = (trans.[0].Length - 1 - numLowUnicodeChars - numUnicodeCategories)/2
+ let lookupUnicodeCharacters (state,inp) =
+ let inpAsInt = int inp
+ // Is it a fast ASCII character?
+ if inpAsInt < numLowUnicodeChars then
+ int trans.[state].[inpAsInt]
+ else
+ // Search for a specific unicode character
+ let baseForSpecificUnicodeChars = numLowUnicodeChars
+ let rec loop i =
+ if i >= numSpecificUnicodeChars then
+ // OK, if we failed then read the 'others' entry in the alphabet,
+ // which covers all Unicode characters not covered in other
+ // ways
+ let baseForUnicodeCategories = numLowUnicodeChars+numSpecificUnicodeChars*2
+#if FX_WINRT
+ let unicodeCategory = System.Globalization.CharUnicodeInfo.GetUnicodeCategory(inp)
+#else
+ let unicodeCategory = System.Char.GetUnicodeCategory(inp)
+#endif
+ //System.Console.WriteLine("inp = {0}, unicodeCategory = {1}", [| box inp; box unicodeCategory |]);
+ int trans.[state].[baseForUnicodeCategories + int32 unicodeCategory]
+ else
+ // This is the specific unicode character
+ let c = char (int trans.[state].[baseForSpecificUnicodeChars+i*2])
+ //System.Console.WriteLine("c = {0}, inp = {1}, i = {2}", [| box c; box inp; box i |]);
+ // OK, have we found the entry for a specific unicode character?
+ if c = inp
+ then int trans.[state].[baseForSpecificUnicodeChars+i*2+1]
+ else loop(i+1)
+
+ loop 0
+ let eofPos = numLowUnicodeChars + 2*numSpecificUnicodeChars + numUnicodeCategories
+
+ let rec scanUntilSentinel(lexBuffer,state) =
+ // Return an endOfScan after consuming the input
+ let a = int accept.[state]
+ if a <> sentinel then
+ onAccept(lexBuffer,a)
+
+ if lexBuffer.BufferScanLength = lexBuffer.BufferMaxScanLength then
+ lexBuffer.DiscardInput();
+ lexBuffer.RefillBuffer ();
+ // end of file occurs if we couldn't extend the buffer
+ afterRefill (trans,sentinel,lexBuffer,scanUntilSentinel,lexBuffer.EndOfScan,state,eofPos)
+ else
+ // read a character - end the scan if there are no further transitions
+ let inp = lexBuffer.Buffer.[lexBuffer.BufferScanPos]
+
+ // Find the new state
+ let snew = lookupUnicodeCharacters (state,inp)
+
+ if snew = sentinel then
+ lexBuffer.EndOfScan()
+ else
+ lexBuffer.BufferScanLength <- lexBuffer.BufferScanLength + 1;
+ // Printf.printf "state %d --> %d on '%c' (%d)\n" s snew (char inp) inp;
+ scanUntilSentinel(lexBuffer,snew)
+
+ // Each row for the Unicode table has format
+ // 128 entries for ASCII characters
+ // A variable number of 2*UInt16 entries for SpecificUnicodeChars
+ // 30 entries, one for each UnicodeCategory
+ // 1 entry for EOF
+
+ member tables.Interpret(initialState,lexBuffer : LexBuffer<char>) =
+ startInterpret(lexBuffer)
+ scanUntilSentinel(lexBuffer, initialState)
+
+ member tables.AsyncInterpret(initialState,lexBuffer : LexBuffer<char>) =
+
+ let rec scanUntilSentinel(lexBuffer, state) =
+ async {
+ // Return an endOfScan after consuming the input
+ let a = int accept.[state]
+ if a <> sentinel then
+ onAccept(lexBuffer,a)
+
+ if lexBuffer.BufferScanLength = lexBuffer.BufferMaxScanLength then
+ lexBuffer.DiscardInput();
+ lexBuffer.RefillBuffer ();
+ // end of file occurs if we couldn't extend the buffer
+ return! afterRefill (trans,sentinel,lexBuffer,scanUntilSentinel,endOfScan,state,eofPos)
+ else
+ // read a character - end the scan if there are no further transitions
+ let inp = lexBuffer.Buffer.[lexBuffer.BufferScanPos]
+
+ // Find the new state
+ let snew = lookupUnicodeCharacters (state,inp)
+
+ if snew = sentinel then
+ return! endOfScan()
+ else
+ lexBuffer.BufferScanLength <- lexBuffer.BufferScanLength + 1;
+ return! scanUntilSentinel(lexBuffer, snew)
+ }
+ and endOfScan() =
+ async { return lexBuffer.EndOfScan() }
+ startInterpret(lexBuffer)
+ scanUntilSentinel(lexBuffer, initialState)
+
+ static member Create(trans,accept) = new UnicodeTables(trans,accept)
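The row layout described in the comment inside UnicodeTables (128 ASCII
entries, two entries per specific character, 30 category entries, one EOF
entry) fixes where each part of a row lives. A small sketch of the resulting
offsets, assuming an illustrative count of 10 specific characters per row:

    // Offsets into one row of a Unicode transition table; the layout mirrors
    // the comment in UnicodeTables above. The specific-character count is an
    // assumed value chosen only for illustration.
    let numLowUnicodeChars = 128
    let numUnicodeCategories = 30
    let numSpecificUnicodeChars = 10
    let baseForSpecificUnicodeChars = numLowUnicodeChars                                  // 128
    let baseForUnicodeCategories = numLowUnicodeChars + numSpecificUnicodeChars * 2       // 148
    let eofPos = numLowUnicodeChars + 2 * numSpecificUnicodeChars + numUnicodeCategories  // 178
    // so each row holds eofPos + 1 = 179 entries, and the runtime recovers
    // numSpecificUnicodeChars as (179 - 1 - 128 - 30) / 2.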
diff --git a/lib/bootstrap/src/FsLexYacc.Runtime/Lexing.fsi b/lib/bootstrap/src/FsLexYacc.Runtime/Lexing.fsi
new file mode 100644
index 0000000..e31ad41
--- /dev/null
+++ b/lib/bootstrap/src/FsLexYacc.Runtime/Lexing.fsi
@@ -0,0 +1,151 @@
+//==========================================================================
+// LexBuffers are for use with automatically generated lexical analyzers,
+// in particular those produced by 'fslex'.
+//
+// (c) Microsoft Corporation 2005-2008.
+//===========================================================================
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+namespace Internal.Utilities.Text.Lexing
+#else
+namespace Microsoft.FSharp.Text.Lexing
+#endif
+
+open System.Collections.Generic
+
+/// Position information stored for lexing tokens
+//
+// Note: this is an OCaml compat record type.
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+type internal Position =
+#else
+type Position =
+#endif
+ { /// The file name for the position
+ pos_fname: string;
+ /// The line number for the position
+ pos_lnum: int;
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+ /// The line number for the position in the original source file
+ pos_orig_lnum : int;
+#endif
+ /// The absolute offset of the beginning of the line
+ pos_bol: int;
+ /// The absolute offset of the column for the position
+ pos_cnum: int; }
+ /// The file name associated with the input stream.
+ member FileName : string
+ /// The line number in the input stream, assuming fresh positions have been updated
+ /// using AsNewLinePos() and by modifying the EndPos property of the LexBuffer.
+ member Line : int
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+ /// The line number for the position in the input stream, assuming fresh positions have been updated
+ /// using AsNewLinePos()
+ member OriginalLine : int
+#endif
+ [<System.ObsoleteAttribute("Use the AbsoluteOffset property instead")>]
+ member Char : int
+ /// The character number in the input stream
+ member AbsoluteOffset : int
+ /// Return absolute offset of the start of the line marked by the position
+ member StartOfLineAbsoluteOffset : int
+ /// Return the column number marked by the position, i.e. the difference between the AbsoluteOffset and the StartOfLineAbsoluteOffset
+ member Column : int
+    /// Given a position just beyond the end of a line, return a position at the start of the next line
+ member NextLine : Position
+
+ /// Given a position at the start of a token of length n, return a position just beyond the end of the token
+ member EndOfToken: n:int -> Position
+ /// Gives a position shifted by specified number of characters
+ member ShiftColumnBy: by:int -> Position
+
+ [<System.ObsoleteAttribute("Consider using the NextLine property instead")>]
+ member AsNewLinePos : unit -> Position
+
+    /// Get an arbitrary position, with the empty string as filename, and all other fields set to zero
+ static member Empty : Position
+
+ /// Get a position corresponding to the first line (line number 1) in a given file
+ static member FirstLine : filename:string -> Position
+
+[<Sealed>]
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+type internal LexBuffer<'char> =
+#else
+/// Input buffers consumed by lexers generated by <c>fslex.exe </c>
+type LexBuffer<'char> =
+#endif
+ /// The start position for the lexeme
+ member StartPos: Position with get,set
+ /// The end position for the lexeme
+ member EndPos: Position with get,set
+ /// The matched string
+ member Lexeme: 'char array
+
+ /// Fast helper to turn the matched characters into a string, avoiding an intermediate array
+ static member LexemeString : LexBuffer<char> -> string
+
+ /// The length of the matched string
+ member LexemeLength: int
+ /// Fetch a particular character in the matched string
+ member LexemeChar: int -> 'char
+
+ /// Dynamically typed, non-lexically scoped parameter table
+ member BufferLocalStore : IDictionary<string,obj>
+
+    /// True if the refill of the buffer ever failed, or if explicitly set to true.
+ member IsPastEndOfStream: bool with get,set
+ /// Remove all input, though don't discard the current lexeme
+ member DiscardInput: unit -> unit
+
+ /// Create a lex buffer suitable for byte lexing that reads characters from the given array
+ static member FromBytes: byte[] -> LexBuffer<byte>
+ /// Create a lex buffer suitable for Unicode lexing that reads characters from the given array
+ static member FromChars: char[] -> LexBuffer<char>
+ /// Create a lex buffer suitable for Unicode lexing that reads characters from the given string
+ static member FromString: string -> LexBuffer<char>
+ /// Create a lex buffer that reads character or byte inputs by using the given function
+ static member FromFunction: ('char[] * int * int -> int) -> LexBuffer<'char>
+ /// Create a lex buffer that asynchronously reads character or byte inputs by using the given function
+ static member FromAsyncFunction: ('char[] * int * int -> Async<int>) -> LexBuffer<'char>
+
+
+ [<System.Obsolete("Use LexBuffer<char>.FromFunction instead")>]
+ static member FromCharFunction: (char[] -> int -> int) -> LexBuffer<char>
+ [<System.Obsolete("Use LexBuffer<byte>.FromFunction instead")>]
+ static member FromByteFunction: (byte[] -> int -> int) -> LexBuffer<byte>
+
+ /// Create a lex buffer suitable for use with a Unicode lexer that reads character inputs from the given text reader
+ static member FromTextReader: System.IO.TextReader -> LexBuffer<char>
+ /// Create a lex buffer suitable for use with ASCII byte lexing that reads byte inputs from the given binary reader
+ static member FromBinaryReader: System.IO.BinaryReader -> LexBuffer<byte>
+
+
+/// The type of tables for an ascii lexer generated by fslex.
+[<Sealed>]
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+type internal AsciiTables =
+#else
+type AsciiTables =
+#endif
+ static member Create : uint16[] array * uint16[] -> AsciiTables
+ /// Interpret tables for an ascii lexer generated by fslex.
+ member Interpret: initialState:int * LexBuffer<byte> -> int
+ /// Interpret tables for an ascii lexer generated by fslex, processing input asynchronously
+ member AsyncInterpret: initialState:int * LexBuffer<byte> -> Async<int>
+
+
+/// The type of tables for a Unicode lexer generated by fslex.
+[<Sealed>]
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+type internal UnicodeTables =
+#else
+type UnicodeTables =
+#endif
+ static member Create : uint16[] array * uint16[] -> UnicodeTables
+ /// Interpret tables for a unicode lexer generated by fslex.
+ member Interpret: initialState:int * LexBuffer<char> -> int
+
+ /// Interpret tables for a unicode lexer generated by fslex, processing input asynchronously
+ member AsyncInterpret: initialState:int * LexBuffer<char> -> Async<int>
+
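A short sketch of how a lex buffer from this API is normally created and how
line positions are advanced; the lexer entry-point name is a placeholder for
whatever fslex generates and is not defined in this commit:

    open Microsoft.FSharp.Text.Lexing

    // Build a Unicode lex buffer from an in-memory string and seed its position.
    let lexbuf = LexBuffer<char>.FromString "let x = 1\nlet y = 2\n"
    lexbuf.EndPos <- Position.FirstLine "input.fsx"

    // A generated lexer's newline rule usually advances the position with
    //     lexbuf.EndPos <- lexbuf.EndPos.NextLine
    // before re-entering the rule (e.g. 'token lexbuf', where 'token' stands
    // for the generated entry point).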
diff --git a/lib/bootstrap/src/FsLexYacc.Runtime/Parsing.fs b/lib/bootstrap/src/FsLexYacc.Runtime/Parsing.fs
new file mode 100644
index 0000000..523940b
--- /dev/null
+++ b/lib/bootstrap/src/FsLexYacc.Runtime/Parsing.fs
@@ -0,0 +1,512 @@
+// (c) Microsoft Corporation 2005-2009.
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+
+namespace Internal.Utilities.Text.Parsing
+open Internal.Utilities
+open Internal.Utilities.Text.Lexing
+
+#else
+namespace Microsoft.FSharp.Text.Parsing
+open Microsoft.FSharp.Text.Lexing
+#endif
+
+
+
+open System
+open System.Collections.Generic
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+type internal IParseState =
+#else
+type IParseState =
+#endif
+ abstract InputRange: int -> Position * Position
+ abstract InputEndPosition: int -> Position
+ abstract InputStartPosition: int -> Position
+ abstract ResultRange: Position * Position
+ abstract GetInput: int -> obj
+ abstract ParserLocalStore : IDictionary<string,obj>
+ abstract RaiseError<'b> : unit -> 'b
+
+//-------------------------------------------------------------------------
+// This context is passed to the error reporter when a syntax error occurs
+
+[<Sealed>]
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+type internal ParseErrorContext<'tok>
+#else
+type ParseErrorContext<'tok>
+#endif
+ (//lexbuf: LexBuffer<_>,
+ stateStack:int list,
+ parseState: IParseState,
+ reduceTokens: int list,
+ currentToken: 'tok option,
+ reducibleProductions: int list list,
+ shiftableTokens: int list ,
+ message : string) =
+ //member x.LexBuffer = lexbuf
+ member x.StateStack = stateStack
+ member x.ReduceTokens = reduceTokens
+ member x.CurrentToken = currentToken
+ member x.ParseState = parseState
+ member x.ReducibleProductions = reducibleProductions
+ member x.ShiftTokens = shiftableTokens
+ member x.Message = message
+
+
+//-------------------------------------------------------------------------
+// This is the data structure emitted as code by FSYACC.
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+type internal Tables<'tok> =
+#else
+type Tables<'tok> =
+#endif
+ { reductions: (IParseState -> obj) array;
+ endOfInputTag: int;
+ tagOfToken: 'tok -> int;
+ dataOfToken: 'tok -> obj;
+ actionTableElements: uint16[];
+ actionTableRowOffsets: uint16[];
+ reductionSymbolCounts: uint16[];
+ immediateActions: uint16[];
+ gotos: uint16[];
+ sparseGotoTableRowOffsets: uint16[];
+ stateToProdIdxsTableElements: uint16[];
+ stateToProdIdxsTableRowOffsets: uint16[];
+ productionToNonTerminalTable: uint16[];
+ /// For fsyacc.exe, this entry is filled in by context from the generated parser file. If no 'parse_error' function
+ /// is defined by the user then ParseHelpers.parse_error is used by default (ParseHelpers is opened
+ /// at the top of the generated parser file)
+ parseError: ParseErrorContext<'tok> -> unit;
+ numTerminals: int;
+ tagOfErrorTerminal: int }
+
+//-------------------------------------------------------------------------
+// An implementation of stacks.
+
+// This type is in System.dll so for the moment we can't use it in FSharp.Core.dll
+//type Stack<'a> = System.Collections.Generic.Stack<'a>
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+type Stack<'a>(n) =
+#else
+type internal Stack<'a>(n) =
+#endif
+ let mutable contents = Array.zeroCreate<'a>(n)
+ let mutable count = 0
+
+ member buf.Ensure newSize =
+ let oldSize = Array.length contents
+ if newSize > oldSize then
+ let old = contents
+ contents <- Array.zeroCreate (max newSize (oldSize * 2));
+ Array.blit old 0 contents 0 count;
+
+ member buf.Count = count
+ member buf.Pop() = count <- count - 1
+ member buf.Peep() = contents.[count - 1]
+ member buf.Top(n) = [ for x in contents.[max 0 (count-n)..count - 1] -> x ] |> List.rev
+ member buf.Push(x) =
+ buf.Ensure(count + 1);
+ contents.[count] <- x;
+ count <- count + 1
+
+ member buf.IsEmpty = (count = 0)
+ member buf.PrintStack() =
+ for i = 0 to (count - 1) do
+#if FX_NO_CONSOLE
+ ()
+#else
+ System.Console.Write("{0}{1}",(contents.[i]),if i=count-1 then ":" else "-")
+#endif
+exception RecoverableParseError
+exception Accept of obj
+
+#if __DEBUG
+module Flags =
+ let mutable debug = false
+#endif
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+module internal Implementation =
+#else
+module Implementation =
+#endif
+
+ // Definitions shared with fsyacc
+ let anyMarker = 0xffff
+ let shiftFlag = 0x0000
+ let reduceFlag = 0x4000
+ let errorFlag = 0x8000
+ let acceptFlag = 0xc000
+ let actionMask = 0xc000
+
+ let actionValue action = action &&& (~~~ actionMask)
+ let actionKind action = action &&& actionMask
+
+ //-------------------------------------------------------------------------
+ // Read the tables written by FSYACC.
+
+ type AssocTable(elemTab:uint16[], offsetTab:uint16[]) =
+ let cache = new Dictionary<_,_>(2000)
+
+ member t.readAssoc (minElemNum,maxElemNum,defaultValueOfAssoc,keyToFind) =
+ // do a binary chop on the table
+ let elemNumber : int = (minElemNum+maxElemNum)/2
+ if elemNumber = maxElemNum
+ then defaultValueOfAssoc
+ else
+ let x = int elemTab.[elemNumber*2]
+ if keyToFind = x then
+ int elemTab.[elemNumber*2+1]
+ elif keyToFind < x then t.readAssoc (minElemNum ,elemNumber,defaultValueOfAssoc,keyToFind)
+ else t.readAssoc (elemNumber+1,maxElemNum,defaultValueOfAssoc,keyToFind)
+
+ member t.Read(rowNumber ,keyToFind) =
+
+ // First check the sparse lookaside table
+ // Performance note: without this lookaside table the binary chop in readAssoc
+        // takes up around 10% of parsing time
+ // for parsing intensive samples such as the bootstrapped F# compiler.
+ //
+ // Note: using a .NET Dictionary for this int -> int table looks like it could be sub-optimal.
+ // Some other better sparse lookup table may be better.
+ let mutable res = 0
+ let cacheKey = (rowNumber <<< 16) ||| keyToFind
+ let ok = cache.TryGetValue(cacheKey, &res)
+ if ok then res
+ else
+ let headOfTable = int offsetTab.[rowNumber]
+ let firstElemNumber = headOfTable + 1
+ let numberOfElementsInAssoc = int elemTab.[headOfTable*2]
+ let defaultValueOfAssoc = int elemTab.[headOfTable*2+1]
+ let res = t.readAssoc (firstElemNumber,(firstElemNumber+numberOfElementsInAssoc),defaultValueOfAssoc,keyToFind)
+ cache.[cacheKey] <- res
+ res
+
+ // Read all entries in the association table
+ // Used during error recovery to find all valid entries in the table
+ member x.ReadAll(n) =
+ let headOfTable = int offsetTab.[n]
+ let firstElemNumber = headOfTable + 1
+ let numberOfElementsInAssoc = int32 elemTab.[headOfTable*2]
+ let defaultValueOfAssoc = int elemTab.[headOfTable*2+1]
+ [ for i in firstElemNumber .. (firstElemNumber+numberOfElementsInAssoc-1) ->
+ (int elemTab.[i*2], int elemTab.[i*2+1]) ], defaultValueOfAssoc
+
+ type IdxToIdxListTable(elemTab:uint16[], offsetTab:uint16[]) =
+
+ // Read all entries in a row of the table
+ member x.ReadAll(n) =
+ let headOfTable = int offsetTab.[n]
+ let firstElemNumber = headOfTable + 1
+ let numberOfElements = int32 elemTab.[headOfTable]
+ [ for i in firstElemNumber .. (firstElemNumber+numberOfElements-1) -> int elemTab.[i] ]
+
+ //-------------------------------------------------------------------------
+ // interpret the tables emitted by FSYACC.
+
+ [<NoEquality; NoComparison>]
+ [<Struct>]
+ type ValueInfo =
+ val value: obj
+ val startPos: Position
+ val endPos: Position
+ new(value,startPos,endPos) = { value=value; startPos=startPos;endPos=endPos }
+
+ let interpret (tables: Tables<'tok>) lexer (lexbuf : LexBuffer<_>) initialState =
+ let localStore = new Dictionary<string,obj>() in
+ localStore.["LexBuffer"] <- lexbuf;
+#if __DEBUG
+ if Flags.debug then System.Console.WriteLine("\nParser: interpret tables");
+#endif
+ let stateStack : Stack<int> = new Stack<_>(100)
+ stateStack.Push(initialState);
+ let valueStack = new Stack<ValueInfo>(100)
+ let mutable haveLookahead = false
+ let mutable lookaheadToken = Unchecked.defaultof<'tok>
+ let mutable lookaheadEndPos = Unchecked.defaultof<Position>
+ let mutable lookaheadStartPos = Unchecked.defaultof<Position>
+ let mutable finished = false
+ // After an error occurs, we suppress errors until we've shifted three tokens in a row.
+ let mutable errorSuppressionCountDown = 0
+
+ // When we hit the end-of-file we don't fail straight away but rather keep permitting shift
+ // and reduce against the last token in the token stream 20 times or until we've accepted
+ // or exhausted the stack. This allows error recovery rules of the form
+ // input : realInput EOF | realInput error EOF | error EOF
+ // where consuming one EOF to trigger an error doesn't result in overall parse failure
+ // catastrophe and the loss of intermediate results.
+ //
+ let mutable inEofCountDown = false
+ let mutable eofCountDown = 20 // Number of EOFs to supply at the end for error recovery
+ // The 100 here means a maximum of 100 elements for each rule
+ let ruleStartPoss = (Array.zeroCreate 100 : Position array)
+ let ruleEndPoss = (Array.zeroCreate 100 : Position array)
+ let ruleValues = (Array.zeroCreate 100 : obj array)
+ let lhsPos = (Array.zeroCreate 2 : Position array)
+ let reductions = tables.reductions
+ let actionTable = new AssocTable(tables.actionTableElements, tables.actionTableRowOffsets)
+ let gotoTable = new AssocTable(tables.gotos, tables.sparseGotoTableRowOffsets)
+ let stateToProdIdxsTable = new IdxToIdxListTable(tables.stateToProdIdxsTableElements, tables.stateToProdIdxsTableRowOffsets)
+
+ let parseState =
+ { new IParseState with
+ member p.InputRange(n) = ruleStartPoss.[n-1], ruleEndPoss.[n-1];
+ member p.InputStartPosition(n) = ruleStartPoss.[n-1]
+ member p.InputEndPosition(n) = ruleEndPoss.[n-1];
+ member p.GetInput(n) = ruleValues.[n-1];
+ member p.ResultRange = (lhsPos.[0], lhsPos.[1]);
+ member p.ParserLocalStore = (localStore :> IDictionary<_,_>);
+ member p.RaiseError() = raise RecoverableParseError (* NOTE: this binding tests the fairly complex logic associated with an object expression implementing a generic abstract method *)
+ }
+
+#if __DEBUG
+ let report haveLookahead lookaheadToken =
+ if haveLookahead then sprintf "%A" lookaheadToken
+ else "[TBC]"
+#endif
+
+ // Pop the stack until we can shift the 'error' token. If 'tokenOpt' is given
+ // then keep popping until we can shift both the 'error' token and the token in 'tokenOpt'.
+ // This is used at end-of-file to make sure we can shift both the 'error' token and the 'EOF' token.
+ let rec popStackUntilErrorShifted(tokenOpt) =
+ // Keep popping the stack until the "error" terminal is shifted
+#if __DEBUG
+ if Flags.debug then System.Console.WriteLine("popStackUntilErrorShifted");
+#endif
+ if stateStack.IsEmpty then
+#if __DEBUG
+ if Flags.debug then
+ System.Console.WriteLine("state stack empty during error recovery - generating parse error");
+#endif
+ failwith "parse error";
+
+ let currState = stateStack.Peep()
+#if __DEBUG
+ if Flags.debug then
+ System.Console.WriteLine("In state {0} during error recovery", currState);
+#endif
+
+ let action = actionTable.Read(currState, tables.tagOfErrorTerminal)
+
+ if actionKind action = shiftFlag &&
+ (match tokenOpt with
+ | None -> true
+ | Some(token) ->
+ let nextState = actionValue action
+ actionKind (actionTable.Read(nextState, tables.tagOfToken(token))) = shiftFlag) then
+
+#if __DEBUG
+ if Flags.debug then System.Console.WriteLine("shifting error, continuing with error recovery");
+#endif
+ let nextState = actionValue action
+ // The "error" non terminal needs position information, though it tends to be unreliable.
+ // Use the StartPos/EndPos from the lex buffer
+ valueStack.Push(ValueInfo(box (), lexbuf.StartPos, lexbuf.EndPos));
+ stateStack.Push(nextState)
+ else
+ if valueStack.IsEmpty then
+ failwith "parse error";
+#if __DEBUG
+ if Flags.debug then
+ System.Console.WriteLine("popping stack during error recovery");
+#endif
+ valueStack.Pop();
+ stateStack.Pop();
+ popStackUntilErrorShifted(tokenOpt)
+
+ while not finished do
+ if stateStack.IsEmpty then
+ finished <- true
+ else
+ let state = stateStack.Peep()
+#if __DEBUG
+ if Flags.debug then (Console.Write("{0} value(state), state ",valueStack.Count); stateStack.PrintStack())
+#endif
+ let action =
+ let immediateAction = int tables.immediateActions.[state]
+ if not (immediateAction = anyMarker) then
+ // Action has been pre-determined, no need to lookahead
+ // Expecting it to be a Reduce action on a non-fakeStartNonTerminal ?
+ immediateAction
+ else
+ // Lookahead required to determine action
+ if not haveLookahead then
+ if lexbuf.IsPastEndOfStream then
+ // When the input runs out, keep supplying the last token for eofCountDown times
+ if eofCountDown>0 then
+ haveLookahead <- true
+ eofCountDown <- eofCountDown - 1
+ inEofCountDown <- true
+ else
+ haveLookahead <- false
+ else
+ lookaheadToken <- lexer lexbuf
+ lookaheadStartPos <- lexbuf.StartPos
+ lookaheadEndPos <- lexbuf.EndPos
+ haveLookahead <- true;
+
+ let tag =
+ if haveLookahead then tables.tagOfToken lookaheadToken
+ else tables.endOfInputTag
+
+ // Printf.printf "state %d\n" state
+ actionTable.Read(state,tag)
+
+ let kind = actionKind action
+ if kind = shiftFlag then (
+ if errorSuppressionCountDown > 0 then
+ errorSuppressionCountDown <- errorSuppressionCountDown - 1;
+#if __DEBUG
+ if Flags.debug then Console.WriteLine("shifting, reduced errorRecoverylevel to {0}\n", errorSuppressionCountDown);
+#endif
+ let nextState = actionValue action
+ if not haveLookahead then failwith "shift on end of input!";
+ let data = tables.dataOfToken lookaheadToken
+ valueStack.Push(ValueInfo(data, lookaheadStartPos, lookaheadEndPos));
+ stateStack.Push(nextState);
+#if __DEBUG
+ if Flags.debug then Console.WriteLine("shift/consume input {0}, shift to state {1}", report haveLookahead lookaheadToken, nextState);
+#endif
+ haveLookahead <- false
+
+ ) elif kind = reduceFlag then
+ let prod = actionValue action
+ let reduction = reductions.[prod]
+ let n = int tables.reductionSymbolCounts.[prod]
+ // pop the symbols, populate the values and populate the locations
+#if __DEBUG
+ if Flags.debug then Console.Write("reduce popping {0} values/states, lookahead {1}", n, report haveLookahead lookaheadToken);
+#endif
+
+ lhsPos.[0] <- Position.Empty;
+ lhsPos.[1] <- Position.Empty;
+ for i = 0 to n - 1 do
+ if valueStack.IsEmpty then failwith "empty symbol stack";
+ let topVal = valueStack.Peep()
+ valueStack.Pop();
+ stateStack.Pop();
+ ruleValues.[(n-i)-1] <- topVal.value;
+ ruleStartPoss.[(n-i)-1] <- topVal.startPos;
+ ruleEndPoss.[(n-i)-1] <- topVal.endPos;
+ if lhsPos.[1] = Position.Empty then lhsPos.[1] <- topVal.endPos;
+ if not (topVal.startPos = Position.Empty) then lhsPos.[0] <- topVal.startPos
+ done;
+
+ try
+ // Printf.printf "reduce %d\n" prod;
+ let redResult = reduction parseState
+ valueStack.Push(ValueInfo(redResult, lhsPos.[0], lhsPos.[1]));
+ let currState = stateStack.Peep()
+ let newGotoState = gotoTable.Read(int tables.productionToNonTerminalTable.[prod], currState)
+ stateStack.Push(newGotoState)
+#if __DEBUG
+ if Flags.debug then Console.WriteLine(" goto state {0}", newGotoState)
+#endif
+ with
+ | Accept res ->
+ finished <- true;
+ valueStack.Push(ValueInfo(res, lhsPos.[0], lhsPos.[1]))
+ | RecoverableParseError ->
+#if __DEBUG
+ if Flags.debug then Console.WriteLine("RecoverableParseErrorException...\n");
+#endif
+ popStackUntilErrorShifted(None);
+ // User code raised a Parse_error. Don't report errors again until three tokens have been shifted
+ errorSuppressionCountDown <- 3
+ elif kind = errorFlag then (
+#if __DEBUG
+ if Flags.debug then Console.Write("ErrorFlag... ");
+#endif
+ // Silently discard inputs and don't report errors
+ // until three tokens in a row have been shifted
+#if __DEBUG
+ if Flags.debug then printfn "error on token '%A' " (if haveLookahead then Some(lookaheadToken) else None);
+#endif
+ if errorSuppressionCountDown > 0 then
+ // If we're in the end-of-file count down then we're very keen to 'Accept'.
+ // We can only do this by repeatedly popping the stack until we can shift both an 'error' token
+ // and an EOF token.
+ if inEofCountDown && eofCountDown < 10 then
+#if __DEBUG
+ if Flags.debug then printfn "poppin stack, lokking to shift both 'error' and that token, during end-of-file error recovery" ;
+#endif
+ popStackUntilErrorShifted(if haveLookahead then Some(lookaheadToken) else None);
+
+ // If we don't haveLookahead then the end-of-file count down is over and we have no further options.
+ if not haveLookahead then
+ failwith "parse error: unexpected end of file"
+
+#if __DEBUG
+ if Flags.debug then printfn "discarding token '%A' during error suppression" (if haveLookahead then Some(lookaheadToken) else None);
+#endif
+ // Discard the token
+ haveLookahead <- false
+ // Try again to shift three tokens
+ errorSuppressionCountDown <- 3
+ else (
+
+ let currentToken = if haveLookahead then Some(lookaheadToken) else None
+ let actions,defaultAction = actionTable.ReadAll(state)
+ let explicit = Set.ofList [ for (tag,_action) in actions -> tag ]
+
+ let shiftableTokens =
+ [ for (tag,action) in actions do
+ if (actionKind action) = shiftFlag then
+ yield tag
+ if actionKind defaultAction = shiftFlag then
+ for tag in 0 .. tables.numTerminals-1 do
+ if not (explicit.Contains(tag)) then
+ yield tag ] in
+
+ let stateStack = stateStack.Top(12) in
+ let reducibleProductions =
+ [ for state in stateStack do
+ yield stateToProdIdxsTable.ReadAll(state) ]
+
+ let reduceTokens =
+ [ for (tag,action) in actions do
+ if actionKind(action) = reduceFlag then
+ yield tag
+ if actionKind(defaultAction) = reduceFlag then
+ for tag in 0 .. tables.numTerminals-1 do
+ if not (explicit.Contains(tag)) then
+ yield tag ] in
+ //let activeRules = stateStack |> List.iter (fun state ->
+ let errorContext = new ParseErrorContext<'tok>(stateStack,parseState, reduceTokens,currentToken,reducibleProductions, shiftableTokens, "syntax error")
+ tables.parseError(errorContext);
+ popStackUntilErrorShifted(None);
+ errorSuppressionCountDown <- 3;
+#if __DEBUG
+ if Flags.debug then System.Console.WriteLine("generated syntax error and shifted error token, haveLookahead = {0}\n", haveLookahead);
+#endif
+ )
+ ) elif kind = acceptFlag then
+ finished <- true
+#if __DEBUG
+ else
+ if Flags.debug then System.Console.WriteLine("ALARM!!! drop through case in parser");
+#endif
+ done;
+ // OK, we're done - read off the overall generated value
+ valueStack.Peep().value
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+type internal Tables<'tok> with
+#else
+type Tables<'tok> with
+#endif
+ member tables.Interpret (lexer,lexbuf,initialState) =
+ Implementation.interpret tables lexer lexbuf initialState
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+module internal ParseHelpers =
+#else
+module ParseHelpers =
+#endif
+ let parse_error (_s:string) = ()
+ let parse_error_rich = (None : (ParseErrorContext<_> -> unit) option)
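The action words read from the sparse tables above pack a kind and an argument
into one 16-bit value. Restating the constants from the Implementation module,
a quick check of the encoding looks like this:

    let reduceFlag = 0x4000
    let actionMask = 0xc000
    let actionValue action = action &&& (~~~ actionMask)   // low 14 bits: the argument
    let actionKind  action = action &&& actionMask         // top 2 bits: the kind

    let action = reduceFlag ||| 5                           // "reduce by production 5"
    printfn "%b %d" (actionKind action = reduceFlag) (actionValue action)   // prints: true 5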
diff --git a/lib/bootstrap/src/FsLexYacc.Runtime/Parsing.fsi b/lib/bootstrap/src/FsLexYacc.Runtime/Parsing.fsi
new file mode 100644
index 0000000..2fef459
--- /dev/null
+++ b/lib/bootstrap/src/FsLexYacc.Runtime/Parsing.fsi
@@ -0,0 +1,130 @@
+//==========================================================================
+// (c) Microsoft Corporation 2005-2009.
+//=========================================================================
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+namespace Internal.Utilities.Text.Parsing
+open Internal.Utilities
+open Internal.Utilities.Text.Lexing
+#else
+namespace Microsoft.FSharp.Text.Parsing
+open Microsoft.FSharp.Text.Lexing
+#endif
+
+open System.Collections.Generic
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+type internal IParseState =
+#else
+/// The information accessible via the <c>parseState</c> value within parser actions.
+type IParseState =
+#endif
+ /// Get the start and end position for the terminal or non-terminal at a given index matched by the production
+ abstract InputRange: index:int -> Position * Position
+ /// Get the end position for the terminal or non-terminal at a given index matched by the production
+ abstract InputEndPosition: int -> Position
+ /// Get the start position for the terminal or non-terminal at a given index matched by the production
+ abstract InputStartPosition: int -> Position
+ /// Get the full range of positions matched by the production
+ abstract ResultRange: Position * Position
+ /// Get the value produced by the terminal or non-terminal at the given position
+ abstract GetInput : int -> obj
+ /// Get the store of local values associated with this parser
+ // Dynamically typed, non-lexically scoped local store
+ abstract ParserLocalStore : IDictionary<string,obj>
+ /// Raise an error in this parse context
+ abstract RaiseError<'b> : unit -> 'b
+
+
+[<Sealed>]
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+type internal ParseErrorContext<'tok> =
+#else
+/// The context provided when a parse error occurs
+type ParseErrorContext<'tok> =
+#endif
+ /// The stack of state indexes active at the parse error
+ member StateStack : int list
+ /// The state active at the parse error
+ member ParseState : IParseState
+ /// The tokens that would cause a reduction at the parse error
+ member ReduceTokens: int list
+ /// The stack of productions that would be reduced at the parse error
+ member ReducibleProductions : int list list
+ /// The token that caused the parse error
+ member CurrentToken : 'tok option
+    /// The tokens that would cause a shift at the parse error
+ member ShiftTokens : int list
+ /// The message associated with the parse error
+ member Message : string
+
+/// Tables generated by fsyacc
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+type internal Tables<'tok> =
+#else
+/// The type of the tables contained in a file produced by the fsyacc.exe parser generator.
+type Tables<'tok> =
+#endif
+ { /// The reduction table
+ reductions: (IParseState -> obj) array ;
+ /// The token number indicating the end of input
+ endOfInputTag: int;
+ /// A function to compute the tag of a token
+ tagOfToken: 'tok -> int;
+ /// A function to compute the data carried by a token
+ dataOfToken: 'tok -> obj;
+ /// The sparse action table elements
+ actionTableElements: uint16[];
+ /// The sparse action table row offsets
+ actionTableRowOffsets: uint16[];
+ /// The number of symbols for each reduction
+ reductionSymbolCounts: uint16[];
+ /// The immediate action table
+ immediateActions: uint16[];
+ /// The sparse goto table
+ gotos: uint16[];
+ /// The sparse goto table row offsets
+ sparseGotoTableRowOffsets: uint16[];
+ /// The sparse table for the productions active for each state
+ stateToProdIdxsTableElements: uint16[];
+ /// The sparse table offsets for the productions active for each state
+ stateToProdIdxsTableRowOffsets: uint16[];
+ /// This table is logically part of the Goto table
+ productionToNonTerminalTable: uint16[];
+ /// This function is used to hold the user specified "parse_error" or "parse_error_rich" functions
+ parseError: ParseErrorContext<'tok> -> unit;
+ /// The total number of terminals
+ numTerminals: int;
+ /// The tag of the error terminal
+ tagOfErrorTerminal: int }
+
+ /// Interpret the parser table taking input from the given lexer, using the given lex buffer, and the given start state.
+ /// Returns an object indicating the final synthesized value for the parse.
+ member Interpret : lexer:(LexBuffer<'char> -> 'tok) * lexbuf:LexBuffer<'char> * startState:int -> obj
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+exception internal Accept of obj
+exception internal RecoverableParseError
+#else
+/// Indicates an accept action has occurred
+exception Accept of obj
+/// Indicates a parse error has occurred and parse recovery is in progress
+exception RecoverableParseError
+#endif
+
+#if __DEBUG
+module internal Flags =
+ val mutable debug : bool
+#endif
+
+#if INTERNALIZED_FSLEXYACC_RUNTIME
+module internal ParseHelpers =
+#else
+/// Helpers used by generated parsers.
+module ParseHelpers =
+#endif
+ /// The default implementation of the parse_error_rich function
+ val parse_error_rich: (ParseErrorContext<'tok> -> unit) option
+ /// The default implementation of the parse_error function
+ val parse_error: string -> unit
+
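Taken together, the Lexing and Parsing runtime above is driven by fslex/fsyacc
generated code roughly as in the sketch below; 'Lexer', 'Parser' and their
'token'/'start' entry points follow the usual generated naming conventions but
are placeholders here, not part of this commit:

    open Microsoft.FSharp.Text.Lexing

    let parse (source: string) =
        let lexbuf = LexBuffer<char>.FromString source
        try
            Parser.start Lexer.token lexbuf
        with _ ->
            failwithf "parse error near line %d, column %d"
                lexbuf.EndPos.Line lexbuf.EndPos.Column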
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-cli-apps/packages/fsharp.git